diff --git a/sdk/datamigration/azure-mgmt-datamigration/CHANGELOG.md b/sdk/datamigration/azure-mgmt-datamigration/CHANGELOG.md
index 30846380536a..c43ef43f9cb4 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/CHANGELOG.md
+++ b/sdk/datamigration/azure-mgmt-datamigration/CHANGELOG.md
@@ -1,5 +1,83 @@
# Release History
+## 10.1.0b2 (2025-04-01)
+
+### Features Added
+
+ - Client `DataMigrationManagementClient` added operation group `database_migrations_mongo_to_cosmos_db_ru_mongo`
+ - Client `DataMigrationManagementClient` added operation group `database_migrations_mongo_to_cosmos_dbv_core_mongo`
+ - Client `DataMigrationManagementClient` added operation group `migration_services`
+ - Model `AzureBlob` added property `auth_type`
+ - Model `AzureBlob` added property `identity`
+ - Model `ProxyResource` added property `system_data`
+ - Model `Resource` added property `system_data`
+ - Enum `ResourceType` added member `MONGO_TO_COSMOS_DB_MONGO`
+ - Added enum `AuthType`
+ - Added model `DatabaseMigrationBase`
+ - Added model `DatabaseMigrationBaseListResult`
+ - Added model `DatabaseMigrationBaseProperties`
+ - Added model `DatabaseMigrationCosmosDbMongo`
+ - Added model `DatabaseMigrationCosmosDbMongoListResult`
+ - Added model `DatabaseMigrationPropertiesCosmosDbMongo`
+ - Added model `ErrorAdditionalInfo`
+ - Added model `ErrorDetail`
+ - Added model `ErrorResponse`
+ - Added model `ManagedServiceIdentity`
+ - Added enum `ManagedServiceIdentityType`
+ - Added model `MigrationService`
+ - Added model `MigrationServiceListResult`
+ - Added model `MigrationServiceUpdate`
+ - Added model `MongoConnectionInformation`
+ - Added model `MongoMigrationCollection`
+ - Added model `MongoMigrationProgressDetails`
+ - Added enum `MongoMigrationStatus`
+ - Added enum `ProvisioningState`
+ - Added model `ProxyResourceAutoGenerated`
+ - Added model `ResourceAutoGenerated`
+ - Added model `SystemDataAutoGenerated`
+ - Added model `TrackedResourceAutoGenerated`
+ - Added model `UserAssignedIdentity`
+ - Added model `DatabaseMigrationsMongoToCosmosDbRUMongoOperations`
+ - Added model `DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations`
+ - Added model `MigrationServicesOperations`
+ - Method `DatabaseMigrationsSqlDbOperations.begin_cancel` has a new overload `def begin_cancel(self: None, resource_group_name: str, sql_db_instance_name: str, target_db_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsSqlDbOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, sql_db_instance_name: str, target_db_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsSqlMiOperations.begin_cancel` has a new overload `def begin_cancel(self: None, resource_group_name: str, managed_instance_name: str, target_db_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsSqlMiOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, managed_instance_name: str, target_db_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsSqlMiOperations.begin_cutover` has a new overload `def begin_cutover(self: None, resource_group_name: str, managed_instance_name: str, target_db_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsSqlVmOperations.begin_cancel` has a new overload `def begin_cancel(self: None, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsSqlVmOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsSqlVmOperations.begin_cutover` has a new overload `def begin_cutover(self: None, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `FilesOperations.create_or_update` has a new overload `def create_or_update(self: None, group_name: str, service_name: str, project_name: str, file_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `FilesOperations.update` has a new overload `def update(self: None, group_name: str, service_name: str, project_name: str, file_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `ProjectsOperations.create_or_update` has a new overload `def create_or_update(self: None, group_name: str, service_name: str, project_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `ProjectsOperations.update` has a new overload `def update(self: None, group_name: str, service_name: str, project_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `ServiceTasksOperations.create_or_update` has a new overload `def create_or_update(self: None, group_name: str, service_name: str, task_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `ServiceTasksOperations.update` has a new overload `def update(self: None, group_name: str, service_name: str, task_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `ServicesOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, group_name: str, service_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `ServicesOperations.begin_update` has a new overload `def begin_update(self: None, group_name: str, service_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `ServicesOperations.check_children_name_availability` has a new overload `def check_children_name_availability(self: None, group_name: str, service_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `ServicesOperations.check_name_availability` has a new overload `def check_name_availability(self: None, location: str, parameters: IO[bytes], content_type: str)`
+ - Method `SqlMigrationServicesOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, sql_migration_service_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `SqlMigrationServicesOperations.begin_update` has a new overload `def begin_update(self: None, resource_group_name: str, sql_migration_service_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `SqlMigrationServicesOperations.delete_node` has a new overload `def delete_node(self: None, resource_group_name: str, sql_migration_service_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `SqlMigrationServicesOperations.regenerate_auth_keys` has a new overload `def regenerate_auth_keys(self: None, resource_group_name: str, sql_migration_service_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `TasksOperations.command` has a new overload `def command(self: None, group_name: str, service_name: str, project_name: str, task_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `TasksOperations.create_or_update` has a new overload `def create_or_update(self: None, group_name: str, service_name: str, project_name: str, task_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `TasksOperations.update` has a new overload `def update(self: None, group_name: str, service_name: str, project_name: str, task_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsMongoToCosmosDbRUMongoOperations.begin_create` has a new overload `def begin_create(self: None, resource_group_name: str, target_resource_name: str, migration_name: str, parameters: DatabaseMigrationCosmosDbMongo, content_type: str)`
+ - Method `DatabaseMigrationsMongoToCosmosDbRUMongoOperations.begin_create` has a new overload `def begin_create(self: None, resource_group_name: str, target_resource_name: str, migration_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations.begin_create` has a new overload `def begin_create(self: None, resource_group_name: str, target_resource_name: str, migration_name: str, parameters: DatabaseMigrationCosmosDbMongo, content_type: str)`
+ - Method `DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations.begin_create` has a new overload `def begin_create(self: None, resource_group_name: str, target_resource_name: str, migration_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `MigrationServicesOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, migration_service_name: str, parameters: MigrationService, content_type: str)`
+ - Method `MigrationServicesOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, migration_service_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `MigrationServicesOperations.begin_update` has a new overload `def begin_update(self: None, resource_group_name: str, migration_service_name: str, parameters: MigrationServiceUpdate, content_type: str)`
+ - Method `MigrationServicesOperations.begin_update` has a new overload `def begin_update(self: None, resource_group_name: str, migration_service_name: str, parameters: IO[bytes], content_type: str)`
+
+### Breaking Changes
+
+ - Method `TrackedResource.__init__` removed default value `None` from its parameter `location`
+
## 10.1.0b1 (2022-11-18)
### Features Added
diff --git a/sdk/datamigration/azure-mgmt-datamigration/README.md b/sdk/datamigration/azure-mgmt-datamigration/README.md
index 454ebbe1b86f..35a10bc3d383 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/README.md
+++ b/sdk/datamigration/azure-mgmt-datamigration/README.md
@@ -1,28 +1,61 @@
# Microsoft Azure SDK for Python
This is the Microsoft Azure Data Migration Client Library.
-This package has been tested with Python 3.7+.
+This package has been tested with Python 3.8+.
For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all).
## _Disclaimer_
_Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For more information and questions, please refer to https://github.com/Azure/azure-sdk-for-python/issues/20691_
-# Usage
+## Getting started
+### Prerequisites
-To learn how to use this package, see the [quickstart guide](https://aka.ms/azsdk/python/mgmt)
-
-For docs and references, see [Python SDK References](https://docs.microsoft.com/python/api/overview/azure/)
-Code samples for this package can be found at [Data Migration](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com.
-Additional code samples for different Azure services are available at [Samples Repo](https://aka.ms/azsdk/python/mgmt/samples)
+- Python 3.8+ is required to use this package.
+- [Azure subscription](https://azure.microsoft.com/free/)
+### Install the package
-# Provide Feedback
+```bash
+pip install azure-mgmt-datamigration
+pip install azure-identity
+```
-If you encounter any bugs or have suggestions, please file an issue in the
-[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
-section of the project.
+### Authentication
+
+By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configuration of the following environment variables.
+
+- `AZURE_CLIENT_ID` for Azure client ID.
+- `AZURE_TENANT_ID` for Azure tenant ID.
+- `AZURE_CLIENT_SECRET` for Azure client secret.
+
+In addition, Azure subscription ID can be configured via environment variable `AZURE_SUBSCRIPTION_ID`.
+
+With the above configuration, the client can be authenticated with the following code:
+
+```python
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.datamigration import DataMigrationManagementClient
+import os
+sub_id = os.getenv("AZURE_SUBSCRIPTION_ID")
+client = DataMigrationManagementClient(credential=DefaultAzureCredential(), subscription_id=sub_id)
+```
+## Examples
+Code samples for this package can be found at:
+- [Search Data Migration](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com
+- [Azure Python Mgmt SDK Samples Repo](https://aka.ms/azsdk/python/mgmt/samples)
+
+
+## Troubleshooting
+
+## Next steps
+
+## Provide Feedback
+
+If you encounter any bugs or have suggestions, please file an issue in the
+[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
+section of the project.
diff --git a/sdk/datamigration/azure-mgmt-datamigration/_meta.json b/sdk/datamigration/azure-mgmt-datamigration/_meta.json
index 26eab4282e26..79c36768efe0 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/_meta.json
+++ b/sdk/datamigration/azure-mgmt-datamigration/_meta.json
@@ -1,11 +1,11 @@
{
- "commit": "3ce1e043e2d0e57016437a3870f40e33da8a6397",
+ "commit": "d0d3608bda4c7df16c63056331971a0e89f22144",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
- "autorest": "3.9.2",
+ "autorest": "3.10.2",
"use": [
- "@autorest/python@6.2.7",
- "@autorest/modelerfour@4.24.3"
+ "@autorest/python@6.27.4",
+ "@autorest/modelerfour@4.27.0"
],
- "autorest_command": "autorest specification/datamigration/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.2.7 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False",
+ "autorest_command": "autorest specification/datamigration/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.27.4 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
"readme": "specification/datamigration/resource-manager/readme.md"
}
\ No newline at end of file
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/__init__.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/__init__.py
index 0b87fbde0c57..fe09662b09b9 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/__init__.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._data_migration_management_client import DataMigrationManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._data_migration_management_client import DataMigrationManagementClient # type: ignore
from ._version import VERSION
__version__ = VERSION
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -21,6 +27,6 @@
__all__ = [
"DataMigrationManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_configuration.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_configuration.py
index 904245d9627a..6d8a12c3244c 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_configuration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_configuration.py
@@ -6,26 +6,18 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-import sys
from typing import Any, TYPE_CHECKING
-from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
-else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
-
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class DataMigrationManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+class DataMigrationManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
"""Configuration for DataMigrationManagementClient.
Note that all parameters used to create this instance are saved as instance
@@ -35,14 +27,13 @@ class DataMigrationManagementClientConfiguration(Configuration): # pylint: disa
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: Subscription ID that identifies an Azure subscription. Required.
:type subscription_id: str
- :keyword api_version: Api Version. Default value is "2022-03-30-preview". Note that overriding
+ :keyword api_version: Api Version. Default value is "2025-03-15-preview". Note that overriding
this default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
- super(DataMigrationManagementClientConfiguration, self).__init__(**kwargs)
- api_version: Literal["2022-03-30-preview"] = kwargs.pop("api_version", "2022-03-30-preview")
+ api_version: str = kwargs.pop("api_version", "2025-03-15-preview")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
@@ -54,6 +45,7 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs
self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-datamigration/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
self._configure(**kwargs)
def _configure(self, **kwargs: Any) -> None:
@@ -62,9 +54,9 @@ def _configure(self, **kwargs: Any) -> None:
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = ARMChallengeAuthenticationPolicy(
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_data_migration_management_client.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_data_migration_management_client.py
index 76c834fd9a92..4e8e89495557 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_data_migration_management_client.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_data_migration_management_client.py
@@ -8,18 +8,24 @@
from copy import deepcopy
from typing import Any, TYPE_CHECKING
+from typing_extensions import Self
+from azure.core.pipeline import policies
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
+from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy
from . import models as _models
from ._configuration import DataMigrationManagementClientConfiguration
from ._serialization import Deserializer, Serializer
from .operations import (
+ DatabaseMigrationsMongoToCosmosDbRUMongoOperations,
+ DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations,
DatabaseMigrationsSqlDbOperations,
DatabaseMigrationsSqlMiOperations,
DatabaseMigrationsSqlVmOperations,
FilesOperations,
+ MigrationServicesOperations,
Operations,
ProjectsOperations,
ResourceSkusOperations,
@@ -31,13 +37,20 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class DataMigrationManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
+class DataMigrationManagementClient: # pylint: disable=too-many-instance-attributes
"""Data Migration Client.
+ :ivar database_migrations_mongo_to_cosmos_db_ru_mongo:
+ DatabaseMigrationsMongoToCosmosDbRUMongoOperations operations
+ :vartype database_migrations_mongo_to_cosmos_db_ru_mongo:
+ azure.mgmt.datamigration.operations.DatabaseMigrationsMongoToCosmosDbRUMongoOperations
+ :ivar database_migrations_mongo_to_cosmos_dbv_core_mongo:
+ DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations operations
+ :vartype database_migrations_mongo_to_cosmos_dbv_core_mongo:
+ azure.mgmt.datamigration.operations.DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations
:ivar database_migrations_sql_db: DatabaseMigrationsSqlDbOperations operations
:vartype database_migrations_sql_db:
azure.mgmt.datamigration.operations.DatabaseMigrationsSqlDbOperations
@@ -49,6 +62,8 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-versi
azure.mgmt.datamigration.operations.DatabaseMigrationsSqlVmOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.datamigration.operations.Operations
+ :ivar migration_services: MigrationServicesOperations operations
+ :vartype migration_services: azure.mgmt.datamigration.operations.MigrationServicesOperations
:ivar sql_migration_services: SqlMigrationServicesOperations operations
:vartype sql_migration_services:
azure.mgmt.datamigration.operations.SqlMigrationServicesOperations
@@ -72,7 +87,7 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-versi
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
- :keyword api_version: Api Version. Default value is "2022-03-30-preview". Note that overriding
+ :keyword api_version: Api Version. Default value is "2025-03-15-preview". Note that overriding
this default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
@@ -89,12 +104,36 @@ def __init__(
self._config = DataMigrationManagementClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
- self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ ARMAutoResourceProviderRegistrationPolicy(),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, policies=_policies, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
+ self.database_migrations_mongo_to_cosmos_db_ru_mongo = DatabaseMigrationsMongoToCosmosDbRUMongoOperations(
+ self._client, self._config, self._serialize, self._deserialize
+ )
+ self.database_migrations_mongo_to_cosmos_dbv_core_mongo = DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations(
+ self._client, self._config, self._serialize, self._deserialize
+ )
self.database_migrations_sql_db = DatabaseMigrationsSqlDbOperations(
self._client, self._config, self._serialize, self._deserialize
)
@@ -105,6 +144,9 @@ def __init__(
self._client, self._config, self._serialize, self._deserialize
)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
+ self.migration_services = MigrationServicesOperations(
+ self._client, self._config, self._serialize, self._deserialize
+ )
self.sql_migration_services = SqlMigrationServicesOperations(
self._client, self._config, self._serialize, self._deserialize
)
@@ -116,7 +158,7 @@ def __init__(
self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize)
self.files = FilesOperations(self._client, self._config, self._serialize, self._deserialize)
- def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
+ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
@@ -136,14 +178,14 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
- return self._client.send_request(request_copy, **kwargs)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
def close(self) -> None:
self._client.close()
- def __enter__(self) -> "DataMigrationManagementClient":
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
- def __exit__(self, *exc_details) -> None:
+ def __exit__(self, *exc_details: Any) -> None:
self._client.__exit__(*exc_details)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_serialization.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_serialization.py
index 2c170e28dbca..b24ab2885450 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_serialization.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -38,7 +38,21 @@
import re
import sys
import codecs
-from typing import Optional, Union, AnyStr, IO, Mapping
+from typing import (
+ Dict,
+ Any,
+ cast,
+ Optional,
+ Union,
+ AnyStr,
+ IO,
+ Mapping,
+ Callable,
+ TypeVar,
+ MutableMapping,
+ Type,
+ List,
+)
try:
from urllib import quote # type: ignore
@@ -48,12 +62,14 @@
import isodate # type: ignore
-from typing import Dict, Any, cast
-
-from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+ModelType = TypeVar("ModelType", bound="Model")
+JSON = MutableMapping[str, Any]
+
class RawDeserializer:
@@ -74,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -95,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -107,7 +125,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
pass
return ET.fromstring(data_as_str) # nosec
- except ET.ParseError:
+ except ET.ParseError as err:
# It might be because the server has an issue, and returned JSON with
# content-type XML....
# So let's try a JSON load, and if it's still broken
@@ -126,7 +144,9 @@ def _json_attemp(data):
# The function hack is because Py2.7 messes up with exception
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
- raise_with_traceback(DeserializationError, "XML is invalid")
+ raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
@@ -136,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -153,13 +178,6 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
return None
-try:
- basestring # type: ignore
- unicode_str = unicode # type: ignore
-except NameError:
- basestring = str
- unicode_str = str
-
_LOGGER = logging.getLogger(__name__)
try:
@@ -172,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -194,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -223,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
+ self.additional_properties: Optional[Dict[str, Any]] = {}
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -287,25 +329,35 @@ def __init__(self, **kwargs):
else:
setattr(self, k, kwargs[k])
- def __eq__(self, other):
- """Compare objects by comparing all attributes."""
+ def __eq__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
- def __ne__(self, other):
- """Compare objects by comparing all attributes."""
+ def __ne__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
- def __str__(self):
+ def __str__(self) -> str:
return str(self.__dict__)
@classmethod
- def enable_additional_properties_sending(cls):
+ def enable_additional_properties_sending(cls) -> None:
cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
@classmethod
- def is_xml_model(cls):
+ def is_xml_model(cls) -> bool:
try:
cls._xml_map # type: ignore
except AttributeError:
@@ -314,7 +366,11 @@ def is_xml_model(cls):
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -322,8 +378,8 @@ def _create_xml_node(cls):
return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
- def serialize(self, keep_readonly=False, **kwargs):
- """Return the JSON that would be sent to azure from this model.
+ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+ """Return the JSON that would be sent to server from this model.
This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
@@ -334,10 +390,17 @@ def serialize(self, keep_readonly=False, **kwargs):
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
- def as_dict(self, keep_readonly=True, key_transformer=attribute_transformer, **kwargs):
- """Return a dict that can be JSONify using json.dump.
+ def as_dict(
+ self,
+ keep_readonly: bool = True,
+ key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+ **kwargs: Any
+ ) -> JSON:
+ """Return a dict that can be serialized using json.dump.
Advanced usage might optionally use a callback as parameter:
@@ -363,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -378,25 +444,31 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@classmethod
- def deserialize(cls, data, content_type=None):
+ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType:
"""Parse a str using the RestAPI syntax and return a model.
:param str data: A str using RestAPI structure. JSON by default.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@classmethod
- def from_dict(cls, data, key_extractors=None, content_type=None):
+ def from_dict(
+ cls: Type[ModelType],
+ data: Any,
+ key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+ content_type: Optional[str] = None,
+ ) -> ModelType:
"""Parse a dict using given key extractor return a model.
By default consider key
@@ -404,13 +476,15 @@ def from_dict(cls, data, key_extractors=None, content_type=None):
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
- deserializer.key_extractors = (
- [
+ deserializer.key_extractors = ( # type: ignore
+ [ # type: ignore
attribute_key_case_insensitive_extractor,
rest_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
@@ -418,7 +492,7 @@ def from_dict(cls, data, key_extractors=None, content_type=None):
if key_extractors is None
else key_extractors
)
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@classmethod
def _flatten_subtype(cls, key, objects):
@@ -426,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -479,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer: # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -518,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes=None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -534,17 +614,20 @@ def __init__(self, classes=None):
"[]": self.serialize_iter,
"{}": self.serialize_dict,
}
- self.dependencies = dict(classes) if classes else {}
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -570,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -602,7 +687,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if xml_desc.get("attr", False):
if xml_ns:
ET.register_namespace(xml_prefix, xml_ns)
- xml_name = "{}{}".format(xml_ns, xml_name)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
serialized.set(xml_name, new_attr) # type: ignore
continue
if xml_desc.get("text", False):
@@ -611,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -622,12 +708,11 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
else: # That's a basic type
# Integrate namespace if necessary
local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
- local_node.text = unicode_str(new_attr)
+ local_node.text = str(new_attr)
serialized.append(local_node) # type: ignore
else: # JSON
for k in reversed(keys): # type: ignore
- unflattened = {k: new_attr}
- new_attr = unflattened
+ new_attr = {k: new_attr}
_new_attr = new_attr
_serialized = serialized
@@ -636,28 +721,29 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
_serialized.update(_new_attr) # type: ignore
_new_attr = _new_attr[k] # type: ignore
_serialized = _serialized[k]
- except ValueError:
- continue
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
- raise_with_traceback(SerializationError, msg, err)
- else:
- return serialized
+ raise SerializationError(msg) from err
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
- internal_data_type = data_type.strip("[]{}")
- internal_data_type = self.dependencies.get(internal_data_type, None)
+ internal_data_type_str = data_type.strip("[]{}")
+ internal_data_type = self.dependencies.get(internal_data_type_str, None)
try:
is_xml_model_serialization = kwargs["is_xml"]
except KeyError:
@@ -681,18 +767,20 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
- raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err)
+ raise SerializationError("Unable to build a model: " + str(err)) from err
return self._serialize(data, data_type, **kwargs)
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -703,30 +791,30 @@ def url(self, name, data, data_type, **kwargs):
if kwargs.get("skip_quote") is True:
output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :rtype: str
+ :rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
if data_type.startswith("["):
internal_data_type = data_type[1:-1]
- data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data]
- if not kwargs.get("skip_quote", False):
- data = [quote(str(d), safe="") for d in data]
- return str(self.serialize_iter(data, internal_data_type, **kwargs))
+ do_quote = not kwargs.get("skip_quote", False)
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
# Not a list, regular serialization
output = self.serialize_data(data, data_type, **kwargs)
@@ -736,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -757,30 +846,31 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
try:
+ if data is CoreNull:
+ return None
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -795,12 +885,11 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
- raise_with_traceback(SerializationError, msg.format(data, data_type), err)
- else:
- return self._serialize(data, **kwargs)
+ raise SerializationError(msg.format(data, data_type)) from err
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -816,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param object data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -846,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -857,13 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
+ :keyword bool do_quote: Whether to URL-quote each serialized element. Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -875,9 +966,14 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
for d in data:
try:
serialized.append(self.serialize_data(d, iter_type, **kwargs))
- except ValueError:
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
serialized.append(None)
+ if kwargs.get("do_quote", False):
+ serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
+
if div:
serialized = ["" if s is None else str(s) for s in serialized]
serialized = div.join(serialized)
@@ -913,16 +1009,17 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
for key, value in attr.items():
try:
serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
- except ValueError:
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
serialized[self.serialize_unicode(key)] = None
if "xml" in serialization_ctxt:
@@ -937,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -945,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -955,7 +1053,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
if obj_type is _long_type:
return self.serialize_long(attr)
- if obj_type is unicode_str:
+ if obj_type is str:
return self.serialize_unicode(attr)
if obj_type is datetime.datetime:
return self.serialize_iso(attr)
@@ -969,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1000,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param bytearray attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param bytes attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal.Decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1057,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1071,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1107,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1132,19 +1239,20 @@ def serialize_iso(attr, **kwargs):
return date + microseconds + "Z"
except (ValueError, OverflowError) as err:
msg = "Unable to serialize datetime object."
- raise_with_traceback(SerializationError, msg, err)
+ raise SerializationError(msg) from err
except AttributeError as err:
msg = "ISO-8601 object must be valid Datetime object."
- raise_with_traceback(TypeError, msg, err)
+ raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1152,16 +1260,17 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
while "." in key:
- dict_keys = _FLATTEN.split(key)
+ # Need the cast, as for some reasons "split" is typed as list[str | Any]
+ dict_keys = cast(List[str], _FLATTEN.split(key))
if len(dict_keys) == 1:
key = _decode_attribute_map_key(dict_keys[0])
break
@@ -1170,14 +1279,15 @@ def rest_key_extractor(attr, attr_desc, data):
if working_data is None:
# If at any point while following flatten JSON path see None, it means
# that all properties under are None as well
- # https://github.com/Azure/msrest-for-python/issues/197
return None
key = ".".join(dict_keys[1:])
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1191,7 +1301,6 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
if working_data is None:
# If at any point while following flatten JSON path see None, it means
# that all properties under are None as well
- # https://github.com/Azure/msrest-for-python/issues/197
return None
key = ".".join(dict_keys[1:])
@@ -1199,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1242,11 +1363,11 @@ def _extract_name_from_internal_type(internal_type):
xml_name = internal_type_xml_map.get("name", internal_type.__name__)
xml_ns = internal_type_xml_map.get("ns", None)
if xml_ns:
- xml_name = "{}{}".format(xml_ns, xml_name)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1266,7 +1387,7 @@ def xml_key_extractor(attr, attr_desc, data):
# Integrate namespace if necessary
xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
if xml_ns:
- xml_name = "{}{}".format(xml_ns, xml_name)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
# If it's an attribute, that's simple
if xml_desc.get("attr", False):
@@ -1298,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1321,7 +1441,7 @@ def xml_key_extractor(attr, attr_desc, data):
return children[0]
-class Deserializer(object):
+class Deserializer:
"""Response object model deserializer.
:param dict classes: Class type dictionary for deserializing complex types.
@@ -1330,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes=None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1352,7 +1472,7 @@ def __init__(self, classes=None):
"duration": (isodate.Duration, datetime.timedelta),
"iso-8601": (datetime.datetime),
}
- self.dependencies = dict(classes) if classes else {}
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
self.key_extractors = [rest_key_extractor, xml_key_extractor]
# Additional properties only works if the "rest_key_extractor" is used to
# extract the keys. Making it to work whatever the key extractor is too much
@@ -1370,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1383,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1405,15 +1527,15 @@ def _deserialize(self, target_obj, data):
response, class_name = self._classify_target(target_obj, data)
- if isinstance(response, basestring):
+ if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1442,10 +1564,9 @@ def _deserialize(self, target_obj, data):
d_attrs[attr] = value
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
- raise_with_traceback(DeserializationError, msg, err)
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ raise DeserializationError(msg) from err
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1471,22 +1592,24 @@ def _classify_target(self, target, data):
Once classification has been determined, initialize object.
:param str target: The target object type to deserialize to.
- :param str/dict data: The response data to deseralize.
+ :param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
- if isinstance(target, basestring):
+ if isinstance(target, str):
try:
target = self.dependencies[target]
except KeyError:
return target, target
try:
- target = target._classify(data, self.dependencies)
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
- return target, target.__class__.__name__
+ return target, target.__class__.__name__ # type: ignore
def failsafe_deserialize(self, target_obj, data, content_type=None):
"""Ignores any errors encountered in deserialization,
@@ -1496,12 +1619,14 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
a deserialization error.
:param str target_obj: The target object type to deserialize to.
- :param str/dict data: The response data to deseralize.
+ :param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1519,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1539,31 +1666,42 @@ def _unpack_content(raw_data, content_type=None):
if hasattr(raw_data, "_content_consumed"):
return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
- if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"):
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
return raw_data
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1572,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1594,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1613,15 +1756,15 @@ def deserialize_data(self, data, data_type):
except (ValueError, TypeError, AttributeError) as err:
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return self._deserialize(obj_type, data)
+ raise DeserializationError(msg) from err
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1638,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1648,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1661,7 +1806,7 @@ def deserialize_object(self, attr, **kwargs):
if isinstance(attr, ET.Element):
# Do no recurse on XML, just return the tree as-is
return attr
- if isinstance(attr, basestring):
+ if isinstance(attr, str):
return self.deserialize_basic(attr, "str")
obj_type = type(attr)
if obj_type in self.basic_types:
@@ -1687,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1699,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1710,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, basestring):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1735,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1748,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1761,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1769,12 +1914,11 @@ def deserialize_enum(data, enum_obj):
data = data.value
if isinstance(data, int):
# Workaround. We might consider remove it in the future.
- # https://github.com/Azure/azure-rest-api-specs/issues/141
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1790,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1802,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1817,22 +1963,24 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
try:
- return decimal.Decimal(attr) # type: ignore
+ return decimal.Decimal(str(attr)) # type: ignore
except decimal.DecimalException as err:
msg = "Invalid decimal {}".format(attr)
- raise_with_traceback(DeserializationError, msg, err)
+ raise DeserializationError(msg) from err
@staticmethod
def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1845,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1854,15 +2003,15 @@ def deserialize_duration(attr):
duration = isodate.parse_duration(attr)
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return duration
+ raise DeserializationError(msg) from err
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1871,13 +2020,14 @@ def deserialize_date(attr):
if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
# This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
- return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
@staticmethod
def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1892,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1906,15 +2057,15 @@ def deserialize_rfc(attr):
date_obj = date_obj.astimezone(tz=TZ_UTC)
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1943,9 +2094,8 @@ def deserialize_iso(attr):
raise OverflowError("Hit max or min date")
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1953,15 +2103,16 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
if isinstance(attr, ET.Element):
attr = int(attr.text) # type: ignore
try:
+ attr = int(attr)
date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_vendor.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_vendor.py
deleted file mode 100644
index 9aad73fc743e..000000000000
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_vendor.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from azure.core.pipeline.transport import HttpRequest
-
-
-def _convert_request(request, files=None):
- data = request.content if not files else None
- request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
- if files:
- request.set_formdata_body(files)
- return request
-
-
-def _format_url_section(template, **kwargs):
- components = template.split("/")
- while components:
- try:
- return template.format(**kwargs)
- except KeyError as key:
- formatted_components = template.split("/")
- components = [c for c in formatted_components if "{}".format(key.args[0]) not in c]
- template = "/".join(components)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_version.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_version.py
index b5e2ac841400..44a6bf38e127 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_version.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "10.1.0b1"
+VERSION = "10.1.0b2"
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/__init__.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/__init__.py
index 3e54c9899ed1..ea3d43b85a90 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/__init__.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._data_migration_management_client import DataMigrationManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._data_migration_management_client import DataMigrationManagementClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"DataMigrationManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_configuration.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_configuration.py
index 4e5ee073f2d1..f60f7d15ae50 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_configuration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_configuration.py
@@ -6,26 +6,18 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-import sys
from typing import Any, TYPE_CHECKING
-from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
-else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
-
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class DataMigrationManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+class DataMigrationManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
"""Configuration for DataMigrationManagementClient.
Note that all parameters used to create this instance are saved as instance
@@ -35,14 +27,13 @@ class DataMigrationManagementClientConfiguration(Configuration): # pylint: disa
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Subscription ID that identifies an Azure subscription. Required.
:type subscription_id: str
- :keyword api_version: Api Version. Default value is "2022-03-30-preview". Note that overriding
+ :keyword api_version: Api Version. Default value is "2025-03-15-preview". Note that overriding
this default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
- super(DataMigrationManagementClientConfiguration, self).__init__(**kwargs)
- api_version: Literal["2022-03-30-preview"] = kwargs.pop("api_version", "2022-03-30-preview")
+ api_version: str = kwargs.pop("api_version", "2025-03-15-preview")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
@@ -54,6 +45,7 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k
self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-datamigration/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
self._configure(**kwargs)
def _configure(self, **kwargs: Any) -> None:
@@ -62,9 +54,9 @@ def _configure(self, **kwargs: Any) -> None:
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_data_migration_management_client.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_data_migration_management_client.py
index 786aa9042b06..e5ad19dfc14f 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_data_migration_management_client.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_data_migration_management_client.py
@@ -8,18 +8,24 @@
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
+from typing_extensions import Self
+from azure.core.pipeline import policies
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
+from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy
from .. import models as _models
from .._serialization import Deserializer, Serializer
from ._configuration import DataMigrationManagementClientConfiguration
from .operations import (
+ DatabaseMigrationsMongoToCosmosDbRUMongoOperations,
+ DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations,
DatabaseMigrationsSqlDbOperations,
DatabaseMigrationsSqlMiOperations,
DatabaseMigrationsSqlVmOperations,
FilesOperations,
+ MigrationServicesOperations,
Operations,
ProjectsOperations,
ResourceSkusOperations,
@@ -31,13 +37,20 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class DataMigrationManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
+class DataMigrationManagementClient: # pylint: disable=too-many-instance-attributes
"""Data Migration Client.
+ :ivar database_migrations_mongo_to_cosmos_db_ru_mongo:
+ DatabaseMigrationsMongoToCosmosDbRUMongoOperations operations
+ :vartype database_migrations_mongo_to_cosmos_db_ru_mongo:
+ azure.mgmt.datamigration.aio.operations.DatabaseMigrationsMongoToCosmosDbRUMongoOperations
+ :ivar database_migrations_mongo_to_cosmos_dbv_core_mongo:
+ DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations operations
+ :vartype database_migrations_mongo_to_cosmos_dbv_core_mongo:
+ azure.mgmt.datamigration.aio.operations.DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations
:ivar database_migrations_sql_db: DatabaseMigrationsSqlDbOperations operations
:vartype database_migrations_sql_db:
azure.mgmt.datamigration.aio.operations.DatabaseMigrationsSqlDbOperations
@@ -49,6 +62,9 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-versi
azure.mgmt.datamigration.aio.operations.DatabaseMigrationsSqlVmOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.datamigration.aio.operations.Operations
+ :ivar migration_services: MigrationServicesOperations operations
+ :vartype migration_services:
+ azure.mgmt.datamigration.aio.operations.MigrationServicesOperations
:ivar sql_migration_services: SqlMigrationServicesOperations operations
:vartype sql_migration_services:
azure.mgmt.datamigration.aio.operations.SqlMigrationServicesOperations
@@ -72,7 +88,7 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-versi
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
- :keyword api_version: Api Version. Default value is "2022-03-30-preview". Note that overriding
+ :keyword api_version: Api Version. Default value is "2025-03-15-preview". Note that overriding
this default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
@@ -89,12 +105,36 @@ def __init__(
self._config = DataMigrationManagementClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
- self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ AsyncARMAutoResourceProviderRegistrationPolicy(),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, policies=_policies, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
+ self.database_migrations_mongo_to_cosmos_db_ru_mongo = DatabaseMigrationsMongoToCosmosDbRUMongoOperations(
+ self._client, self._config, self._serialize, self._deserialize
+ )
+ self.database_migrations_mongo_to_cosmos_dbv_core_mongo = DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations(
+ self._client, self._config, self._serialize, self._deserialize
+ )
self.database_migrations_sql_db = DatabaseMigrationsSqlDbOperations(
self._client, self._config, self._serialize, self._deserialize
)
@@ -105,6 +145,9 @@ def __init__(
self._client, self._config, self._serialize, self._deserialize
)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
+ self.migration_services = MigrationServicesOperations(
+ self._client, self._config, self._serialize, self._deserialize
+ )
self.sql_migration_services = SqlMigrationServicesOperations(
self._client, self._config, self._serialize, self._deserialize
)
@@ -116,7 +159,9 @@ def __init__(
self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize)
self.files = FilesOperations(self._client, self._config, self._serialize, self._deserialize)
- def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
+ def _send_request(
+ self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
+ ) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
@@ -136,14 +181,14 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncH
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
- return self._client.send_request(request_copy, **kwargs)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "DataMigrationManagementClient":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
- async def __aexit__(self, *exc_details) -> None:
+ async def __aexit__(self, *exc_details: Any) -> None:
await self._client.__aexit__(*exc_details)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/__init__.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/__init__.py
index 4825871afb87..08a84f29ada2 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/__init__.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/__init__.py
@@ -5,29 +5,41 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._database_migrations_sql_db_operations import DatabaseMigrationsSqlDbOperations
-from ._database_migrations_sql_mi_operations import DatabaseMigrationsSqlMiOperations
-from ._database_migrations_sql_vm_operations import DatabaseMigrationsSqlVmOperations
-from ._operations import Operations
-from ._sql_migration_services_operations import SqlMigrationServicesOperations
-from ._resource_skus_operations import ResourceSkusOperations
-from ._services_operations import ServicesOperations
-from ._tasks_operations import TasksOperations
-from ._service_tasks_operations import ServiceTasksOperations
-from ._projects_operations import ProjectsOperations
-from ._usages_operations import UsagesOperations
-from ._files_operations import FilesOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._database_migrations_mongo_to_cosmos_db_ru_mongo_operations import DatabaseMigrationsMongoToCosmosDbRUMongoOperations # type: ignore
+from ._database_migrations_mongo_to_cosmos_dbv_core_mongo_operations import DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations # type: ignore
+from ._database_migrations_sql_db_operations import DatabaseMigrationsSqlDbOperations # type: ignore
+from ._database_migrations_sql_mi_operations import DatabaseMigrationsSqlMiOperations # type: ignore
+from ._database_migrations_sql_vm_operations import DatabaseMigrationsSqlVmOperations # type: ignore
+from ._operations import Operations # type: ignore
+from ._migration_services_operations import MigrationServicesOperations # type: ignore
+from ._sql_migration_services_operations import SqlMigrationServicesOperations # type: ignore
+from ._resource_skus_operations import ResourceSkusOperations # type: ignore
+from ._services_operations import ServicesOperations # type: ignore
+from ._tasks_operations import TasksOperations # type: ignore
+from ._service_tasks_operations import ServiceTasksOperations # type: ignore
+from ._projects_operations import ProjectsOperations # type: ignore
+from ._usages_operations import UsagesOperations # type: ignore
+from ._files_operations import FilesOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
+ "DatabaseMigrationsMongoToCosmosDbRUMongoOperations",
+ "DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations",
"DatabaseMigrationsSqlDbOperations",
"DatabaseMigrationsSqlMiOperations",
"DatabaseMigrationsSqlVmOperations",
"Operations",
+ "MigrationServicesOperations",
"SqlMigrationServicesOperations",
"ResourceSkusOperations",
"ServicesOperations",
@@ -37,5 +49,5 @@
"UsagesOperations",
"FilesOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py
new file mode 100644
index 000000000000..678c89928727
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py
@@ -0,0 +1,550 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ...operations._database_migrations_mongo_to_cosmos_db_ru_mongo_operations import (
+ build_create_request,
+ build_delete_request,
+ build_get_for_scope_request,
+ build_get_request,
+)
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class DatabaseMigrationsMongoToCosmosDbRUMongoOperations: # pylint: disable=name-too-long
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.mgmt.datamigration.aio.DataMigrationManagementClient`'s
+ :attr:`database_migrations_mongo_to_cosmos_db_ru_mongo` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace_async
+ async def get(
+ self, resource_group_name: str, target_resource_name: str, migration_name: str, **kwargs: Any
+ ) -> _models.DatabaseMigrationCosmosDbMongo:
+ """Get Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :return: DatabaseMigrationCosmosDbMongo or the result of cls(response)
+ :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None)
+
+ _request = build_get_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ async def _create_initial(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "DatabaseMigrationCosmosDbMongo")
+
+ _request = build_create_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: _models.DatabaseMigrationCosmosDbMongo,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or
+ the result of cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or
+ the result of cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Is either a
+ DatabaseMigrationCosmosDbMongo type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or
+ the result of cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._create_initial(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ force: Optional[bool] = None,
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ force=force,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace_async
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ force: Optional[bool] = None,
+ **kwargs: Any
+ ) -> AsyncLROPoller[None]:
+ """Delete Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param force: Optional force delete boolean. If this is provided as true, migration will be
+ deleted even if active. Default value is None.
+ :type force: bool
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ force=force,
+ api_version=api_version,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
+ @distributed_trace
+ def get_for_scope(
+ self, resource_group_name: str, target_resource_name: str, **kwargs: Any
+ ) -> AsyncIterable["_models.DatabaseMigrationCosmosDbMongo"]:
+ """Get Database Migration resources for the scope.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :return: An iterator like instance of either DatabaseMigrationCosmosDbMongo or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongoListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_get_for_scope_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongoListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py
new file mode 100644
index 000000000000..9c1499c7e979
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py
@@ -0,0 +1,550 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ...operations._database_migrations_mongo_to_cosmos_dbv_core_mongo_operations import (
+ build_create_request,
+ build_delete_request,
+ build_get_for_scope_request,
+ build_get_request,
+)
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations: # pylint: disable=name-too-long
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.mgmt.datamigration.aio.DataMigrationManagementClient`'s
+ :attr:`database_migrations_mongo_to_cosmos_dbv_core_mongo` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace_async
+ async def get(
+ self, resource_group_name: str, target_resource_name: str, migration_name: str, **kwargs: Any
+ ) -> _models.DatabaseMigrationCosmosDbMongo:
+ """Get Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :return: DatabaseMigrationCosmosDbMongo or the result of cls(response)
+ :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None)
+
+ _request = build_get_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ async def _create_initial(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "DatabaseMigrationCosmosDbMongo")
+
+ _request = build_create_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: _models.DatabaseMigrationCosmosDbMongo,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or
+ the result of cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or
+ the result of cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Is either a
+ DatabaseMigrationCosmosDbMongo type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or
+ the result of cls(response)
+ :rtype:
+ ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._create_initial(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ force: Optional[bool] = None,
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ force=force,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace_async
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ force: Optional[bool] = None,
+ **kwargs: Any
+ ) -> AsyncLROPoller[None]:
+ """Delete Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param force: Optional force delete boolean. If this is provided as true, migration will be
+ deleted even if active. Default value is None.
+ :type force: bool
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ force=force,
+ api_version=api_version,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
+ @distributed_trace
+ def get_for_scope(
+ self, resource_group_name: str, target_resource_name: str, **kwargs: Any
+ ) -> AsyncIterable["_models.DatabaseMigrationCosmosDbMongo"]:
+ """Get Database Migration resources for the scope.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :return: An iterator like instance of either DatabaseMigrationCosmosDbMongo or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongoListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_get_for_scope_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongoListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_db_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_db_operations.py
index ebcfe8bdad64..f29a309a2846 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_db_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_db_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,19 +15,19 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._database_migrations_sql_db_operations import (
build_cancel_request,
build_create_or_update_request,
@@ -35,10 +35,10 @@
build_get_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -87,12 +87,11 @@ async def get(
:type migration_operation_id: str
:param expand: Complete migration details be included in the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DatabaseMigrationSqlDb or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -103,12 +102,10 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -116,15 +113,14 @@ async def get(
migration_operation_id=migration_operation_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -133,26 +129,22 @@ async def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return deserialized # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlDb, IO],
+ parameters: Union[_models.DatabaseMigrationSqlDb, IO[bytes]],
**kwargs: Any
- ) -> _models.DatabaseMigrationSqlDb:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -163,21 +155,19 @@ async def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DatabaseMigrationSqlDb")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -186,38 +176,34 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
-
@overload
async def begin_create_or_update(
self,
@@ -243,14 +229,6 @@ async def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlDb or the result
of cls(response)
:rtype:
@@ -264,7 +242,7 @@ async def begin_create_or_update(
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -279,18 +257,10 @@ async def begin_create_or_update(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Details of Sql Db migration resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlDb or the result
of cls(response)
:rtype:
@@ -304,7 +274,7 @@ async def begin_create_or_update(
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlDb, IO],
+ parameters: Union[_models.DatabaseMigrationSqlDb, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.DatabaseMigrationSqlDb]:
"""Create or Update Database Migration resource.
@@ -316,20 +286,9 @@ async def begin_create_or_update(
:type sql_db_instance_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Details of Sql Db migration resource. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of Sql Db migration resource. Is either a DatabaseMigrationSqlDb
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb or IO[bytes]
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlDb or the result
of cls(response)
:rtype:
@@ -339,9 +298,7 @@ async def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -360,12 +317,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -375,27 +333,25 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.DatabaseMigrationSqlDb].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return AsyncLROPoller[_models.DatabaseMigrationSqlDb](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self,
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
force: Optional[bool] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -406,41 +362,43 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
subscription_id=self._config.subscription_id,
force=force,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -463,14 +421,6 @@ async def begin_delete(
:param force: Optional force delete boolean. If this is provided as true, migration will be
deleted even if active. Default value is None.
:type force: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -478,15 +428,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -497,11 +445,12 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -510,27 +459,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _cancel_initial( # pylint: disable=inconsistent-return-statements
+ async def _cancel_initial(
self,
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -541,21 +486,19 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cancel_request(
+ _request = build_cancel_request(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -564,29 +507,33 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cancel_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cancel_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return deserialized # type: ignore
@overload
async def begin_cancel(
@@ -614,14 +561,6 @@ async def begin_cancel(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -633,7 +572,7 @@ async def begin_cancel(
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -649,18 +588,10 @@ async def begin_cancel(
:type target_db_name: str
:param parameters: Required migration operation ID for which cancel will be initiated.
Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -672,7 +603,7 @@ async def begin_cancel(
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Stop on going migration for the database.
@@ -685,19 +616,8 @@ async def begin_cancel(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required migration operation ID for which cancel will be initiated. Is
- either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ either a MigrationOperationInput type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -705,16 +625,14 @@ async def begin_cancel(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._cancel_initial( # type: ignore
+ raw_result = await self._cancel_initial(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -726,11 +644,12 @@ async def begin_cancel(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -739,14 +658,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_mi_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_mi_operations.py
index 292a56ebb61f..88a5dab018e0 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_mi_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_mi_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,19 +15,19 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._database_migrations_sql_mi_operations import (
build_cancel_request,
build_create_or_update_request,
@@ -35,10 +35,10 @@
build_get_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -87,12 +87,11 @@ async def get(
:type migration_operation_id: str
:param expand: Complete migration details be included in the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DatabaseMigrationSqlMi or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -103,12 +102,10 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -116,15 +113,14 @@ async def get(
migration_operation_id=migration_operation_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -133,26 +129,22 @@ async def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return deserialized # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlMi, IO],
+ parameters: Union[_models.DatabaseMigrationSqlMi, IO[bytes]],
**kwargs: Any
- ) -> _models.DatabaseMigrationSqlMi:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -163,21 +155,19 @@ async def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DatabaseMigrationSqlMi")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -186,38 +176,34 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
-
@overload
async def begin_create_or_update(
self,
@@ -243,14 +229,6 @@ async def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlMi or the result
of cls(response)
:rtype:
@@ -264,7 +242,7 @@ async def begin_create_or_update(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -279,18 +257,10 @@ async def begin_create_or_update(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlMi or the result
of cls(response)
:rtype:
@@ -304,7 +274,7 @@ async def begin_create_or_update(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlMi, IO],
+ parameters: Union[_models.DatabaseMigrationSqlMi, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.DatabaseMigrationSqlMi]:
"""Create a new database migration to a given SQL Managed Instance.
@@ -316,20 +286,9 @@ async def begin_create_or_update(
:type managed_instance_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of SqlMigrationService resource. Is either a DatabaseMigrationSqlMi
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi or IO[bytes]
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlMi or the result
of cls(response)
:rtype:
@@ -339,9 +298,7 @@ async def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -360,12 +317,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -375,27 +333,25 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.DatabaseMigrationSqlMi].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return AsyncLROPoller[_models.DatabaseMigrationSqlMi](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _cancel_initial( # pylint: disable=inconsistent-return-statements
+ async def _cancel_initial(
self,
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -406,21 +362,19 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cancel_request(
+ _request = build_cancel_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -429,29 +383,33 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cancel_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cancel_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return deserialized # type: ignore
@overload
async def begin_cancel(
@@ -479,14 +437,6 @@ async def begin_cancel(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -498,7 +448,7 @@ async def begin_cancel(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -514,18 +464,10 @@ async def begin_cancel(
:type target_db_name: str
:param parameters: Required migration operation ID for which cancel will be initiated.
Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -537,7 +479,7 @@ async def begin_cancel(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Stop in-progress database migration to SQL Managed Instance.
@@ -550,19 +492,8 @@ async def begin_cancel(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required migration operation ID for which cancel will be initiated. Is
- either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ either a MigrationOperationInput type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -570,16 +501,14 @@ async def begin_cancel(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._cancel_initial( # type: ignore
+ raw_result = await self._cancel_initial(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -591,11 +520,12 @@ async def begin_cancel(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -604,27 +534,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _cutover_initial( # pylint: disable=inconsistent-return-statements
+ async def _cutover_initial(
self,
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -635,21 +561,19 @@ async def _cutover_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cutover_request(
+ _request = build_cutover_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -658,29 +582,33 @@ async def _cutover_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cutover_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cutover_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover"
- }
+ return deserialized # type: ignore
@overload
async def begin_cutover(
@@ -708,14 +636,6 @@ async def begin_cutover(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -727,7 +647,7 @@ async def begin_cutover(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -743,18 +663,10 @@ async def begin_cutover(
:type target_db_name: str
:param parameters: Required migration operation ID for which cutover will be initiated.
Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -766,7 +678,7 @@ async def begin_cutover(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Initiate cutover for in-progress online database migration to SQL Managed Instance.
@@ -779,19 +691,8 @@ async def begin_cutover(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required migration operation ID for which cutover will be initiated. Is
- either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ either a MigrationOperationInput type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -799,16 +700,14 @@ async def begin_cutover(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._cutover_initial( # type: ignore
+ raw_result = await self._cutover_initial(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -820,11 +719,12 @@ async def begin_cutover(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -833,14 +733,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cutover.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_vm_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_vm_operations.py
index 97c1b71108e0..6e4b08070a20 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_vm_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_vm_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,19 +15,19 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._database_migrations_sql_vm_operations import (
build_cancel_request,
build_create_or_update_request,
@@ -35,10 +35,10 @@
build_get_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -87,12 +87,11 @@ async def get(
:type migration_operation_id: str
:param expand: Complete migration details be included in the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DatabaseMigrationSqlVm or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -103,12 +102,10 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -116,15 +113,14 @@ async def get(
migration_operation_id=migration_operation_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -133,26 +129,22 @@ async def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return deserialized # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlVm, IO],
+ parameters: Union[_models.DatabaseMigrationSqlVm, IO[bytes]],
**kwargs: Any
- ) -> _models.DatabaseMigrationSqlVm:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -163,21 +155,19 @@ async def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DatabaseMigrationSqlVm")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -186,38 +176,34 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
-
@overload
async def begin_create_or_update(
self,
@@ -243,14 +229,6 @@ async def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlVm or the result
of cls(response)
:rtype:
@@ -264,7 +242,7 @@ async def begin_create_or_update(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -279,18 +257,10 @@ async def begin_create_or_update(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlVm or the result
of cls(response)
:rtype:
@@ -304,7 +274,7 @@ async def begin_create_or_update(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlVm, IO],
+ parameters: Union[_models.DatabaseMigrationSqlVm, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.DatabaseMigrationSqlVm]:
"""Create a new database migration to a given SQL VM.
@@ -316,20 +286,9 @@ async def begin_create_or_update(
:type sql_virtual_machine_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of SqlMigrationService resource. Is either a DatabaseMigrationSqlVm
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm or IO[bytes]
:return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlVm or the result
of cls(response)
:rtype:
@@ -339,9 +298,7 @@ async def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -360,12 +317,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -375,27 +333,25 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.DatabaseMigrationSqlVm].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return AsyncLROPoller[_models.DatabaseMigrationSqlVm](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _cancel_initial( # pylint: disable=inconsistent-return-statements
+ async def _cancel_initial(
self,
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -406,21 +362,19 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cancel_request(
+ _request = build_cancel_request(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -429,29 +383,33 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cancel_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cancel_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return deserialized # type: ignore
@overload
async def begin_cancel(
@@ -478,14 +436,6 @@ async def begin_cancel(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -497,7 +447,7 @@ async def begin_cancel(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -512,18 +462,10 @@ async def begin_cancel(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -535,7 +477,7 @@ async def begin_cancel(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Stop in-progress database migration to SQL VM.
@@ -547,19 +489,8 @@ async def begin_cancel(
:type sql_virtual_machine_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Is either a MigrationOperationInput type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -567,16 +498,14 @@ async def begin_cancel(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._cancel_initial( # type: ignore
+ raw_result = await self._cancel_initial(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -588,11 +517,12 @@ async def begin_cancel(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -601,27 +531,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _cutover_initial( # pylint: disable=inconsistent-return-statements
+ async def _cutover_initial(
self,
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -632,21 +558,19 @@ async def _cutover_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cutover_request(
+ _request = build_cutover_request(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -655,29 +579,33 @@ async def _cutover_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cutover_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cutover_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover"
- }
+ return deserialized # type: ignore
@overload
async def begin_cutover(
@@ -704,14 +632,6 @@ async def begin_cutover(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -723,7 +643,7 @@ async def begin_cutover(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -738,18 +658,10 @@ async def begin_cutover(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -761,7 +673,7 @@ async def begin_cutover(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Initiate cutover for in-progress online database migration to SQL VM.
@@ -773,19 +685,8 @@ async def begin_cutover(
:type sql_virtual_machine_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Is either a MigrationOperationInput type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -793,16 +694,14 @@ async def begin_cutover(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._cutover_initial( # type: ignore
+ raw_result = await self._cutover_initial(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -814,11 +713,12 @@ async def begin_cutover(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -827,14 +727,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cutover.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_files_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_files_operations.py
index 028ca68bb599..1ca29ab73be3 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_files_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_files_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,15 +20,13 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._files_operations import (
build_create_or_update_request,
build_delete_request,
@@ -39,10 +37,10 @@
build_update_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -81,7 +79,6 @@ def list(
:type service_name: str
:param project_name: Name of the project. Required.
:type project_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProjectFile or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ProjectFile]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -89,12 +86,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FileList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -105,18 +100,16 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -128,13 +121,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("FileList", pipeline_response)
@@ -144,10 +136,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -160,10 +153,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files"
- }
-
@distributed_trace_async
async def get(
self, group_name: str, service_name: str, project_name: str, file_name: str, **kwargs: Any
@@ -181,12 +170,11 @@ async def get(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -197,27 +185,24 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
file_name=file_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -227,16 +212,12 @@ async def get(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectFile", pipeline_response)
+ deserialized = self._deserialize("ProjectFile", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}"
- }
+ return deserialized # type: ignore
@overload
async def create_or_update(
@@ -267,7 +248,6 @@ async def create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
@@ -280,7 +260,7 @@ async def create_or_update(
service_name: str,
project_name: str,
file_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -298,11 +278,10 @@ async def create_or_update(
:param file_name: Name of the File. Required.
:type file_name: str
:param parameters: Information about the file. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
@@ -315,7 +294,7 @@ async def create_or_update(
service_name: str,
project_name: str,
file_name: str,
- parameters: Union[_models.ProjectFile, IO],
+ parameters: Union[_models.ProjectFile, IO[bytes]],
**kwargs: Any
) -> _models.ProjectFile:
"""Create a file resource.
@@ -330,17 +309,14 @@ async def create_or_update(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :param parameters: Information about the file. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the file. Is either a ProjectFile type or an IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO[bytes]
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -351,21 +327,19 @@ async def create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectFile")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -375,15 +349,14 @@ async def create_or_update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -393,23 +366,15 @@ async def create_or_update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("ProjectFile", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("ProjectFile", pipeline_response)
+ deserialized = self._deserialize("ProjectFile", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}"
- }
-
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self, group_name: str, service_name: str, project_name: str, file_name: str, **kwargs: Any
) -> None:
"""Delete file.
@@ -424,12 +389,11 @@ async def delete( # pylint: disable=inconsistent-return-statements
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -440,27 +404,24 @@ async def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
file_name=file_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -471,11 +432,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}"
- }
+ return cls(pipeline_response, None, {}) # type: ignore
@overload
async def update(
@@ -506,7 +463,6 @@ async def update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
@@ -519,7 +475,7 @@ async def update(
service_name: str,
project_name: str,
file_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -537,11 +493,10 @@ async def update(
:param file_name: Name of the File. Required.
:type file_name: str
:param parameters: Information about the file. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
@@ -554,7 +509,7 @@ async def update(
service_name: str,
project_name: str,
file_name: str,
- parameters: Union[_models.ProjectFile, IO],
+ parameters: Union[_models.ProjectFile, IO[bytes]],
**kwargs: Any
) -> _models.ProjectFile:
"""Update a file.
@@ -569,17 +524,14 @@ async def update(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :param parameters: Information about the file. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the file. Is either a ProjectFile type or an IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO[bytes]
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -590,21 +542,19 @@ async def update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectFile")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -614,15 +564,14 @@ async def update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -632,16 +581,12 @@ async def update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectFile", pipeline_response)
+ deserialized = self._deserialize("ProjectFile", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def read(
@@ -660,12 +605,11 @@ async def read(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: FileStorageInfo or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.FileStorageInfo
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -676,27 +620,24 @@ async def read(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FileStorageInfo] = kwargs.pop("cls", None)
- request = build_read_request(
+ _request = build_read_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
file_name=file_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.read.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -706,16 +647,12 @@ async def read(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("FileStorageInfo", pipeline_response)
+ deserialized = self._deserialize("FileStorageInfo", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- read.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}/read"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def read_write(
@@ -733,12 +670,11 @@ async def read_write(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: FileStorageInfo or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.FileStorageInfo
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -749,27 +685,24 @@ async def read_write(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FileStorageInfo] = kwargs.pop("cls", None)
- request = build_read_write_request(
+ _request = build_read_write_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
file_name=file_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.read_write.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -779,13 +712,9 @@ async def read_write(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("FileStorageInfo", pipeline_response)
+ deserialized = self._deserialize("FileStorageInfo", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- read_write.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}/readwrite"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_migration_services_operations.py
new file mode 100644
index 000000000000..9b93f7906ebe
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_migration_services_operations.py
@@ -0,0 +1,871 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ...operations._migration_services_operations import (
+ build_create_or_update_request,
+ build_delete_request,
+ build_get_request,
+ build_list_by_resource_group_request,
+ build_list_by_subscription_request,
+ build_list_migrations_request,
+ build_update_request,
+)
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class MigrationServicesOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.mgmt.datamigration.aio.DataMigrationManagementClient`'s
+ :attr:`migration_services` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace_async
+ async def get(
+ self, resource_group_name: str, migration_service_name: str, **kwargs: Any
+ ) -> _models.MigrationService:
+ """Retrieve the Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :return: MigrationService or the result of cls(response)
+ :rtype: ~azure.mgmt.datamigration.models.MigrationService
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None)
+
+ _request = build_get_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize("MigrationService", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: Union[_models.MigrationService, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "MigrationService")
+
+ _request = build_create_or_update_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: _models.MigrationService,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.MigrationService]:
+ """Create or Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationService
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.MigrationService]:
+ """Create or Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: Union[_models.MigrationService, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.MigrationService]:
+ """Create or Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Is either a MigrationService type or an
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationService or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("MigrationService", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.MigrationService].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.MigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ async def _delete_initial(
+ self, resource_group_name: str, migration_service_name: str, **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace_async
+ async def begin_delete(
+ self, resource_group_name: str, migration_service_name: str, **kwargs: Any
+ ) -> AsyncLROPoller[None]:
+ """Delete Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ api_version=api_version,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
+ async def _update_initial(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: Union[_models.MigrationServiceUpdate, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "MigrationServiceUpdate")
+
+ _request = build_update_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: _models.MigrationServiceUpdate,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.MigrationService]:
+ """Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationServiceUpdate
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.MigrationService]:
+ """Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: Union[_models.MigrationServiceUpdate, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.MigrationService]:
+ """Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Is either a MigrationServiceUpdate
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationServiceUpdate or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._update_initial(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("MigrationService", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.MigrationService].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.MigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ @distributed_trace
+ def list_by_resource_group(
+ self, resource_group_name: str, **kwargs: Any
+ ) -> AsyncIterable["_models.MigrationService"]:
+ """Retrieve all migration services in the resource group.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :return: An iterator like instance of either MigrationService or the result of cls(response)
+ :rtype:
+ ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.MigrationServiceListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_list_by_resource_group_request(
+ resource_group_name=resource_group_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize("MigrationServiceListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
+
+ @distributed_trace
+ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.MigrationService"]:
+ """Retrieve all migration services in the subscriptions.
+
+ :return: An iterator like instance of either MigrationService or the result of cls(response)
+ :rtype:
+ ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.MigrationServiceListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_list_by_subscription_request(
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize("MigrationServiceListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
+
+ @distributed_trace
+ def list_migrations(
+ self, resource_group_name: str, migration_service_name: str, **kwargs: Any
+ ) -> AsyncIterable["_models.DatabaseMigrationBase"]:
+ """Retrieve the List of database migrations attached to the service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :return: An iterator like instance of either DatabaseMigrationBase or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationBase]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.DatabaseMigrationBaseListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_list_migrations_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize("DatabaseMigrationBaseListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_operations.py
index 92b231c62180..e273f4abfc18 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,20 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._operations import build_list_request
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -61,7 +58,6 @@ def __init__(self, *args, **kwargs) -> None:
def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationsDefinition"]:
"""Lists all of the available SQL Migration REST API operations.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationsDefinition or the result of
cls(response)
:rtype:
@@ -71,12 +67,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationsDefinition"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -87,14 +81,12 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationsDefinition"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -106,13 +98,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
@@ -122,10 +113,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -136,5 +128,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list.metadata = {"url": "/providers/Microsoft.DataMigration/operations"}
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_projects_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_projects_operations.py
index eae43bac1c2d..728592f4d707 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_projects_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_projects_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,15 +20,13 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._projects_operations import (
build_create_or_update_request,
build_delete_request,
@@ -37,10 +35,10 @@
build_update_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -75,7 +73,6 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterab
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Project or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.Project]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -83,12 +80,10 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterab
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -99,17 +94,15 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterab
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -121,13 +114,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProjectList", pipeline_response)
@@ -137,10 +129,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -153,10 +146,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects"
- }
-
@overload
async def create_or_update(
self,
@@ -184,7 +173,6 @@ async def create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
@@ -196,7 +184,7 @@ async def create_or_update(
group_name: str,
service_name: str,
project_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -213,11 +201,10 @@ async def create_or_update(
:param project_name: Name of the project. Required.
:type project_name: str
:param parameters: Information about the project. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
@@ -229,7 +216,7 @@ async def create_or_update(
group_name: str,
service_name: str,
project_name: str,
- parameters: Union[_models.Project, IO],
+ parameters: Union[_models.Project, IO[bytes]],
**kwargs: Any
) -> _models.Project:
"""Create or update project.
@@ -243,18 +230,14 @@ async def create_or_update(
:type service_name: str
:param project_name: Name of the project. Required.
:type project_name: str
- :param parameters: Information about the project. Is either a model type or a IO type.
+ :param parameters: Information about the project. Is either a Project type or a IO[bytes] type.
Required.
- :type parameters: ~azure.mgmt.datamigration.models.Project or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :type parameters: ~azure.mgmt.datamigration.models.Project or IO[bytes]
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -265,21 +248,19 @@ async def create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Project] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "Project")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -288,15 +269,14 @@ async def create_or_update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -306,21 +286,13 @@ async def create_or_update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("Project", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("Project", pipeline_response)
+ deserialized = self._deserialize("Project", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}"
- }
-
@distributed_trace_async
async def get(self, group_name: str, service_name: str, project_name: str, **kwargs: Any) -> _models.Project:
"""Get project information.
@@ -334,12 +306,11 @@ async def get(self, group_name: str, service_name: str, project_name: str, **kwa
:type service_name: str
:param project_name: Name of the project. Required.
:type project_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -350,26 +321,23 @@ async def get(self, group_name: str, service_name: str, project_name: str, **kwa
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.Project] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -379,19 +347,15 @@ async def get(self, group_name: str, service_name: str, project_name: str, **kwa
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Project", pipeline_response)
+ deserialized = self._deserialize("Project", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self,
group_name: str,
service_name: str,
@@ -413,12 +377,11 @@ async def delete( # pylint: disable=inconsistent-return-statements
:param delete_running_tasks: Delete the resource even if it contains running tasks. Default
value is None.
:type delete_running_tasks: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -429,27 +392,24 @@ async def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
subscription_id=self._config.subscription_id,
delete_running_tasks=delete_running_tasks,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -460,11 +420,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}"
- }
+ return cls(pipeline_response, None, {}) # type: ignore
@overload
async def update(
@@ -493,7 +449,6 @@ async def update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
@@ -505,7 +460,7 @@ async def update(
group_name: str,
service_name: str,
project_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -522,11 +477,10 @@ async def update(
:param project_name: Name of the project. Required.
:type project_name: str
:param parameters: Information about the project. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
@@ -538,7 +492,7 @@ async def update(
group_name: str,
service_name: str,
project_name: str,
- parameters: Union[_models.Project, IO],
+ parameters: Union[_models.Project, IO[bytes]],
**kwargs: Any
) -> _models.Project:
"""Update project.
@@ -552,18 +506,14 @@ async def update(
:type service_name: str
:param project_name: Name of the project. Required.
:type project_name: str
- :param parameters: Information about the project. Is either a model type or a IO type.
+ :param parameters: Information about the project. Is either a Project type or a IO[bytes] type.
Required.
- :type parameters: ~azure.mgmt.datamigration.models.Project or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :type parameters: ~azure.mgmt.datamigration.models.Project or IO[bytes]
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -574,21 +524,19 @@ async def update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Project] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "Project")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -597,15 +545,14 @@ async def update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -615,13 +562,9 @@ async def update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Project", pipeline_response)
+ deserialized = self._deserialize("Project", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_resource_skus_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_resource_skus_operations.py
index 9cafaf7e7f73..a2af113f7be2 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_resource_skus_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_resource_skus_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,20 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._resource_skus_operations import build_list_skus_request
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -61,9 +58,8 @@ def __init__(self, *args, **kwargs) -> None:
def list_skus(self, **kwargs: Any) -> AsyncIterable["_models.ResourceSku"]:
"""Get supported SKUs.
- The skus action returns the list of SKUs that DMS supports.
+ The skus action returns the list of SKUs that DMS (classic) supports.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceSku or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ResourceSku]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -71,12 +67,10 @@ def list_skus(self, **kwargs: Any) -> AsyncIterable["_models.ResourceSku"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ResourceSkusResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -87,15 +81,13 @@ def list_skus(self, **kwargs: Any) -> AsyncIterable["_models.ResourceSku"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_skus_request(
+ _request = build_list_skus_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_skus.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -107,13 +99,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceSkusResult", pipeline_response)
@@ -123,10 +114,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -138,5 +130,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list_skus.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/skus"}
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_service_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_service_tasks_operations.py
index acf45d6008c8..dd96285f318a 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_service_tasks_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_service_tasks_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,15 +20,13 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._service_tasks_operations import (
build_cancel_request,
build_create_or_update_request,
@@ -38,10 +36,10 @@
build_update_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -71,10 +69,10 @@ def list(
) -> AsyncIterable["_models.ProjectTask"]:
"""Get service level tasks for a service.
- The services resource is the top-level resource that represents the Database Migration Service.
- This method returns a list of service level tasks owned by a service resource. Some tasks may
- have a status of Unknown, which indicates that an error occurred while querying the status of
- that task.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This method returns a list of service level tasks owned by a service
+ resource. Some tasks may have a status of Unknown, which indicates that an error occurred while
+ querying the status of that task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -82,7 +80,6 @@ def list(
:type service_name: str
:param task_type: Filter tasks by task type. Default value is None.
:type task_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProjectTask or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ProjectTask]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -90,12 +87,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.TaskList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -106,18 +101,16 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
task_type=task_type,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -129,13 +122,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("TaskList", pipeline_response)
@@ -145,10 +137,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -161,10 +154,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks"
- }
-
@overload
async def create_or_update(
self,
@@ -179,9 +168,9 @@ async def create_or_update(
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PUT method creates a new service task or updates an existing one, although
- since service tasks have no mutable custom properties, there is little reason to update an
- existing one.
+ DMS (classic) instance. The PUT method creates a new service task or updates an existing one,
+ although since service tasks have no mutable custom properties, there is little reason to
+ update an existing one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -194,7 +183,6 @@ async def create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -206,7 +194,7 @@ async def create_or_update(
group_name: str,
service_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -214,9 +202,9 @@ async def create_or_update(
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PUT method creates a new service task or updates an existing one, although
- since service tasks have no mutable custom properties, there is little reason to update an
- existing one.
+ DMS (classic) instance. The PUT method creates a new service task or updates an existing one,
+ although since service tasks have no mutable custom properties, there is little reason to
+ update an existing one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -225,11 +213,10 @@ async def create_or_update(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Information about the task. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -241,15 +228,15 @@ async def create_or_update(
group_name: str,
service_name: str,
task_name: str,
- parameters: Union[_models.ProjectTask, IO],
+ parameters: Union[_models.ProjectTask, IO[bytes]],
**kwargs: Any
) -> _models.ProjectTask:
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PUT method creates a new service task or updates an existing one, although
- since service tasks have no mutable custom properties, there is little reason to update an
- existing one.
+ DMS (classic) instance. The PUT method creates a new service task or updates an existing one,
+ although since service tasks have no mutable custom properties, there is little reason to
+ update an existing one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -257,17 +244,14 @@ async def create_or_update(
:type service_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Information about the task. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes]
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -278,21 +262,19 @@ async def create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectTask")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
@@ -301,15 +283,14 @@ async def create_or_update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -319,21 +300,13 @@ async def create_or_update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("ProjectTask", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}"
- }
-
@distributed_trace_async
async def get(
self, group_name: str, service_name: str, task_name: str, expand: Optional[str] = None, **kwargs: Any
@@ -341,7 +314,7 @@ async def get(
"""Get service task information.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The GET method retrieves information about a service task.
+ DMS (classic) instance. The GET method retrieves information about a service task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -351,12 +324,11 @@ async def get(
:type task_name: str
:param expand: Expand the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -367,27 +339,24 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -397,19 +366,15 @@ async def get(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self,
group_name: str,
service_name: str,
@@ -420,7 +385,8 @@ async def delete( # pylint: disable=inconsistent-return-statements
"""Delete service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The DELETE method deletes a service task, canceling it first if it's running.
+ DMS (classic) instance. The DELETE method deletes a service task, canceling it first if it's
+ running.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -431,12 +397,11 @@ async def delete( # pylint: disable=inconsistent-return-statements
:param delete_running_tasks: Delete the resource even if it contains running tasks. Default
value is None.
:type delete_running_tasks: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -447,27 +412,24 @@ async def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
subscription_id=self._config.subscription_id,
delete_running_tasks=delete_running_tasks,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -478,11 +440,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}"
- }
+ return cls(pipeline_response, None, {}) # type: ignore
@overload
async def update(
@@ -498,8 +456,8 @@ async def update(
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PATCH method updates an existing service task, but since service tasks have
- no mutable custom properties, there is little reason to do so.
+ DMS (classic) instance. The PATCH method updates an existing service task, but since service
+ tasks have no mutable custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -512,7 +470,6 @@ async def update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -524,7 +481,7 @@ async def update(
group_name: str,
service_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -532,8 +489,8 @@ async def update(
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PATCH method updates an existing service task, but since service tasks have
- no mutable custom properties, there is little reason to do so.
+ DMS (classic) instance. The PATCH method updates an existing service task, but since service
+ tasks have no mutable custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -542,11 +499,10 @@ async def update(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Information about the task. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -558,14 +514,14 @@ async def update(
group_name: str,
service_name: str,
task_name: str,
- parameters: Union[_models.ProjectTask, IO],
+ parameters: Union[_models.ProjectTask, IO[bytes]],
**kwargs: Any
) -> _models.ProjectTask:
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PATCH method updates an existing service task, but since service tasks have
- no mutable custom properties, there is little reason to do so.
+ DMS (classic) instance. The PATCH method updates an existing service task, but since service
+ tasks have no mutable custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -573,17 +529,14 @@ async def update(
:type service_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Information about the task. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes]
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -594,21 +547,19 @@ async def update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectTask")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
@@ -617,15 +568,14 @@ async def update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -635,23 +585,19 @@ async def update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: Any) -> _models.ProjectTask:
"""Cancel a service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. This method cancels a service task if it's currently queued or running.
+ DMS (classic) instance. This method cancels a service task if it's currently queued or running.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -659,12 +605,11 @@ async def cancel(self, group_name: str, service_name: str, task_name: str, **kwa
:type service_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -675,26 +620,23 @@ async def cancel(self, group_name: str, service_name: str, task_name: str, **kwa
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
- request = build_cancel_request(
+ _request = build_cancel_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.cancel.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -704,13 +646,9 @@ async def cancel(self, group_name: str, service_name: str, task_name: str, **kwa
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}/cancel"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_services_operations.py
index 1b8569459c42..bd37f60d90eb 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_services_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_services_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +18,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +32,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._services_operations import (
build_check_children_name_availability_request,
build_check_name_availability_request,
@@ -46,10 +47,10 @@
build_update_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -74,9 +75,13 @@ def __init__(self, *args, **kwargs) -> None:
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
async def _create_or_update_initial(
- self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any
- ) -> Optional[_models.DataMigrationService]:
- error_map = {
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.DataMigrationService, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -87,21 +92,19 @@ async def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.DataMigrationService]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DataMigrationService")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
@@ -109,39 +112,34 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return deserialized # type: ignore
@overload
async def begin_create_or_update(
@@ -153,16 +151,16 @@ async def begin_create_or_update(
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.DataMigrationService]:
- """Create or update DMS Instance.
+ """Create or update DMS (classic) Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PUT method creates a new service or updates an existing one. When a service is updated,
- existing child resources (i.e. tasks) are unaffected. Services currently support a single kind,
- "vm", which refers to a VM-based service, although other kinds may be added in the future. This
- method can change the kind, SKU, and network of the service, but if tasks are currently running
- (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider
- will reply when successful with 200 OK or 201 Created. Long-running operations use the
- provisioningState property.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PUT method creates a new service or updates an existing one. When a
+ service is updated, existing child resources (i.e. tasks) are unaffected. Services currently
+ support a single kind, "vm", which refers to a VM-based service, although other kinds may be
+ added in the future. This method can change the kind, SKU, and network of the service, but if
+ tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request
+ ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created.
+ Long-running operations use the provisioningState property.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -173,14 +171,6 @@ async def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DataMigrationService or the result
of cls(response)
:rtype:
@@ -193,39 +183,31 @@ async def begin_create_or_update(
self,
group_name: str,
service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.DataMigrationService]:
- """Create or update DMS Instance.
+ """Create or update DMS (classic) Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PUT method creates a new service or updates an existing one. When a service is updated,
- existing child resources (i.e. tasks) are unaffected. Services currently support a single kind,
- "vm", which refers to a VM-based service, although other kinds may be added in the future. This
- method can change the kind, SKU, and network of the service, but if tasks are currently running
- (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider
- will reply when successful with 200 OK or 201 Created. Long-running operations use the
- provisioningState property.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PUT method creates a new service or updates an existing one. When a
+ service is updated, existing child resources (i.e. tasks) are unaffected. Services currently
+ support a single kind, "vm", which refers to a VM-based service, although other kinds may be
+ added in the future. This method can change the kind, SKU, and network of the service, but if
+ tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request
+ ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created.
+ Long-running operations use the provisioningState property.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
:param parameters: Information about the service. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DataMigrationService or the result
of cls(response)
:rtype:
@@ -235,37 +217,30 @@ async def begin_create_or_update(
@distributed_trace_async
async def begin_create_or_update(
- self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.DataMigrationService, IO[bytes]],
+ **kwargs: Any
) -> AsyncLROPoller[_models.DataMigrationService]:
- """Create or update DMS Instance.
+ """Create or update DMS (classic) Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PUT method creates a new service or updates an existing one. When a service is updated,
- existing child resources (i.e. tasks) are unaffected. Services currently support a single kind,
- "vm", which refers to a VM-based service, although other kinds may be added in the future. This
- method can change the kind, SKU, and network of the service, but if tasks are currently running
- (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider
- will reply when successful with 200 OK or 201 Created. Long-running operations use the
- provisioningState property.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PUT method creates a new service or updates an existing one. When a
+ service is updated, existing child resources (i.e. tasks) are unaffected. Services currently
+ support a single kind, "vm", which refers to a VM-based service, although other kinds may be
+ added in the future. This method can change the kind, SKU, and network of the service, but if
+ tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request
+ ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created.
+ Long-running operations use the provisioningState property.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :param parameters: Information about the service. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Information about the service. Is either a DataMigrationService type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO[bytes]
:return: An instance of AsyncLROPoller that returns either DataMigrationService or the result
of cls(response)
:rtype:
@@ -275,9 +250,7 @@ async def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -295,12 +268,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -310,35 +284,32 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.DataMigrationService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return AsyncLROPoller[_models.DataMigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace_async
async def get(self, group_name: str, service_name: str, **kwargs: Any) -> _models.DataMigrationService:
- """Get DMS Service Instance.
+ """Get DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The GET method retrieves information about a service instance.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The GET method retrieves information about a service instance.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DataMigrationService or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DataMigrationService
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -349,25 +320,22 @@ async def get(self, group_name: str, service_name: str, **kwargs: Any) -> _model
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -377,21 +345,17 @@ async def get(self, group_name: str, service_name: str, **kwargs: Any) -> _model
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return deserialized # type: ignore
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, group_name: str, service_name: str, delete_running_tasks: Optional[bool] = None, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -402,50 +366,52 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
delete_running_tasks=delete_running_tasks,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
self, group_name: str, service_name: str, delete_running_tasks: Optional[bool] = None, **kwargs: Any
) -> AsyncLROPoller[None]:
- """Delete DMS Service Instance.
+ """Delete DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The DELETE method deletes a service. Any running tasks will be canceled.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The DELETE method deletes a service. Any running tasks will be canceled.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -454,14 +420,6 @@ async def begin_delete(
:param delete_running_tasks: Delete the resource even if it contains running tasks. Default
value is None.
:type delete_running_tasks: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -469,15 +427,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
group_name=group_name,
service_name=service_name,
delete_running_tasks=delete_running_tasks,
@@ -487,11 +443,12 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -500,22 +457,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
async def _update_initial(
- self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any
- ) -> Optional[_models.DataMigrationService]:
- error_map = {
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.DataMigrationService, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -526,21 +483,19 @@ async def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.DataMigrationService]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DataMigrationService")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
@@ -548,36 +503,34 @@ async def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- _update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return deserialized # type: ignore
@overload
async def begin_update(
@@ -589,12 +542,12 @@ async def begin_update(
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.DataMigrationService]:
- """Create or update DMS Service Instance.
+ """Create or update DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PATCH method updates an existing service. This method can change the kind, SKU, and network
- of the service, but if tasks are currently running (i.e. the service is busy), this will fail
- with 400 Bad Request ("ServiceIsBusy").
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PATCH method updates an existing service. This method can change the
+ kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is
+ busy), this will fail with 400 Bad Request ("ServiceIsBusy").
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -605,14 +558,6 @@ async def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DataMigrationService or the result
of cls(response)
:rtype:
@@ -625,35 +570,27 @@ async def begin_update(
self,
group_name: str,
service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.DataMigrationService]:
- """Create or update DMS Service Instance.
+ """Create or update DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PATCH method updates an existing service. This method can change the kind, SKU, and network
- of the service, but if tasks are currently running (i.e. the service is busy), this will fail
- with 400 Bad Request ("ServiceIsBusy").
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PATCH method updates an existing service. This method can change the
+ kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is
+ busy), this will fail with 400 Bad Request ("ServiceIsBusy").
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
:param parameters: Information about the service. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DataMigrationService or the result
of cls(response)
:rtype:
@@ -663,33 +600,26 @@ async def begin_update(
@distributed_trace_async
async def begin_update(
- self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.DataMigrationService, IO[bytes]],
+ **kwargs: Any
) -> AsyncLROPoller[_models.DataMigrationService]:
- """Create or update DMS Service Instance.
+ """Create or update DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PATCH method updates an existing service. This method can change the kind, SKU, and network
- of the service, but if tasks are currently running (i.e. the service is busy), this will fail
- with 400 Bad Request ("ServiceIsBusy").
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PATCH method updates an existing service. This method can change the
+ kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is
+ busy), this will fail with 400 Bad Request ("ServiceIsBusy").
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :param parameters: Information about the service. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Information about the service. Is either a DataMigrationService type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO[bytes]
:return: An instance of AsyncLROPoller that returns either DataMigrationService or the result
of cls(response)
:rtype:
@@ -699,9 +629,7 @@ async def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -719,12 +647,13 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -734,17 +663,15 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.DataMigrationService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return AsyncLROPoller[_models.DataMigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace_async
async def check_status(
@@ -752,20 +679,19 @@ async def check_status(
) -> _models.DataMigrationServiceStatusResponse:
"""Check service health status.
- The services resource is the top-level resource that represents the Database Migration Service.
- This action performs a health check and returns the status of the service and virtual machine
- size.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This action performs a health check and returns the status of the service
+ and virtual machine size.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DataMigrationServiceStatusResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DataMigrationServiceStatusResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -776,25 +702,22 @@ async def check_status(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataMigrationServiceStatusResponse] = kwargs.pop("cls", None)
- request = build_check_status_request(
+ _request = build_check_status_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.check_status.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -804,21 +727,15 @@ async def check_status(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DataMigrationServiceStatusResponse", pipeline_response)
+ deserialized = self._deserialize("DataMigrationServiceStatusResponse", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- check_status.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkStatus"
- }
+ return deserialized # type: ignore
- async def _start_initial( # pylint: disable=inconsistent-return-statements
- self, group_name: str, service_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ async def _start_initial(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -829,60 +746,55 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_start_request(
+ _request = build_start_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._start_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _start_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
"""Start service.
- The services resource is the top-level resource that represents the Database Migration Service.
- This action starts the service and the service can be used for data migration.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This action starts the service and the service can be used for data
+ migration.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -890,15 +802,13 @@ async def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._start_initial( # type: ignore
+ raw_result = await self._start_initial(
group_name=group_name,
service_name=service_name,
api_version=api_version,
@@ -907,11 +817,12 @@ async def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -920,22 +831,16 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_start.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _stop_initial( # pylint: disable=inconsistent-return-statements
- self, group_name: str, service_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ async def _stop_initial(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -946,61 +851,55 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_stop_request(
+ _request = build_stop_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._stop_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _stop_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
"""Stop service.
- The services resource is the top-level resource that represents the Database Migration Service.
- This action stops the service and the service cannot be used for data migration. The service
- owner won't be billed when the service is stopped.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This action stops the service and the service cannot be used for data
+ migration. The service owner won't be billed when the service is stopped.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1008,15 +907,13 @@ async def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) ->
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._stop_initial( # type: ignore
+ raw_result = await self._stop_initial(
group_name=group_name,
service_name=service_name,
api_version=api_version,
@@ -1025,11 +922,12 @@ async def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) ->
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -1038,17 +936,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_stop.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_skus(
@@ -1056,14 +950,13 @@ def list_skus(
) -> AsyncIterable["_models.AvailableServiceSku"]:
"""Get compatible SKUs.
- The services resource is the top-level resource that represents the Database Migration Service.
- The skus action returns the list of SKUs that a service resource can be updated to.
+ The services resource is the top-level resource that represents the Database Migration Service
+ (classic). The skus action returns the list of SKUs that a service resource can be updated to.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AvailableServiceSku or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.AvailableServiceSku]
@@ -1072,12 +965,10 @@ def list_skus(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ServiceSkuList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1088,17 +979,15 @@ def list_skus(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_skus_request(
+ _request = build_list_skus_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_skus.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1110,13 +999,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ServiceSkuList", pipeline_response)
@@ -1126,10 +1014,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1142,10 +1031,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_skus.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/skus"
- }
-
@overload
async def check_children_name_availability(
self,
@@ -1169,7 +1054,6 @@ async def check_children_name_availability(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1180,7 +1064,7 @@ async def check_children_name_availability(
self,
group_name: str,
service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1194,11 +1078,10 @@ async def check_children_name_availability(
:param service_name: Name of the service. Required.
:type service_name: str
:param parameters: Requested name to validate. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1206,7 +1089,11 @@ async def check_children_name_availability(
@distributed_trace_async
async def check_children_name_availability(
- self, group_name: str, service_name: str, parameters: Union[_models.NameAvailabilityRequest, IO], **kwargs: Any
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.NameAvailabilityRequest, IO[bytes]],
+ **kwargs: Any
) -> _models.NameAvailabilityResponse:
"""Check nested resource name validity and availability.
@@ -1216,17 +1103,14 @@ async def check_children_name_availability(
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :param parameters: Requested name to validate. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Requested name to validate. Is either a NameAvailabilityRequest type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO[bytes]
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1237,21 +1121,19 @@ async def check_children_name_availability(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.NameAvailabilityResponse] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "NameAvailabilityRequest")
- request = build_check_children_name_availability_request(
+ _request = build_check_children_name_availability_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
@@ -1259,15 +1141,14 @@ async def check_children_name_availability(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.check_children_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1277,27 +1158,22 @@ async def check_children_name_availability(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response)
+ deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- check_children_name_availability.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkNameAvailability"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_by_resource_group(self, group_name: str, **kwargs: Any) -> AsyncIterable["_models.DataMigrationService"]:
"""Get services in resource group.
- The Services resource is the top-level resource that represents the Database Migration Service.
- This method returns a list of service resources in a resource group.
+ The Services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This method returns a list of service resources in a resource group.
:param group_name: Name of the resource group. Required.
:type group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DataMigrationService or the result of
cls(response)
:rtype:
@@ -1307,12 +1183,10 @@ def list_by_resource_group(self, group_name: str, **kwargs: Any) -> AsyncIterabl
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataMigrationServiceList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1323,16 +1197,14 @@ def list_by_resource_group(self, group_name: str, **kwargs: Any) -> AsyncIterabl
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
group_name=group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1344,13 +1216,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("DataMigrationServiceList", pipeline_response)
@@ -1360,10 +1231,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1376,18 +1248,13 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services"
- }
-
@distributed_trace
def list(self, **kwargs: Any) -> AsyncIterable["_models.DataMigrationService"]:
"""Get services in subscription.
- The services resource is the top-level resource that represents the Database Migration Service.
- This method returns a list of service resources in a subscription.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This method returns a list of service resources in a subscription.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DataMigrationService or the result of
cls(response)
:rtype:
@@ -1397,12 +1264,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.DataMigrationService"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataMigrationServiceList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1413,15 +1278,13 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.DataMigrationService"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1433,13 +1296,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("DataMigrationServiceList", pipeline_response)
@@ -1449,10 +1311,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1465,8 +1328,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/services"}
-
@overload
async def check_name_availability(
self,
@@ -1487,7 +1348,6 @@ async def check_name_availability(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1495,7 +1355,7 @@ async def check_name_availability(
@overload
async def check_name_availability(
- self, location: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
+ self, location: str, parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> _models.NameAvailabilityResponse:
"""Check name validity and availability.
@@ -1504,11 +1364,10 @@ async def check_name_availability(
:param location: The Azure region of the operation. Required.
:type location: str
:param parameters: Requested name to validate. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1516,7 +1375,7 @@ async def check_name_availability(
@distributed_trace_async
async def check_name_availability(
- self, location: str, parameters: Union[_models.NameAvailabilityRequest, IO], **kwargs: Any
+ self, location: str, parameters: Union[_models.NameAvailabilityRequest, IO[bytes]], **kwargs: Any
) -> _models.NameAvailabilityResponse:
"""Check name validity and availability.
@@ -1524,17 +1383,14 @@ async def check_name_availability(
:param location: The Azure region of the operation. Required.
:type location: str
- :param parameters: Requested name to validate. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Requested name to validate. Is either a NameAvailabilityRequest type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO[bytes]
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1545,36 +1401,33 @@ async def check_name_availability(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.NameAvailabilityResponse] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "NameAvailabilityRequest")
- request = build_check_name_availability_request(
+ _request = build_check_name_availability_request(
location=location,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
- template_url=self.check_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1584,13 +1437,9 @@ async def check_name_availability(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response)
+ deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- check_name_availability.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/checkNameAvailability"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_sql_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_sql_migration_services_operations.py
index 179add632186..d822b06d81f6 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_sql_migration_services_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_sql_migration_services_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +18,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +32,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._sql_migration_services_operations import (
build_create_or_update_request,
build_delete_node_request,
@@ -45,10 +46,10 @@
build_update_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -83,12 +84,11 @@ async def get(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: SqlMigrationService or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.SqlMigrationService
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -99,25 +99,22 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -126,25 +123,21 @@ async def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return deserialized # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.SqlMigrationService, IO],
+ parameters: Union[_models.SqlMigrationService, IO[bytes]],
**kwargs: Any
- ) -> _models.SqlMigrationService:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -155,21 +148,19 @@ async def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "SqlMigrationService")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
@@ -177,38 +168,34 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
-
@overload
async def begin_create_or_update(
self,
@@ -231,14 +218,6 @@ async def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype:
@@ -251,7 +230,7 @@ async def begin_create_or_update(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -264,18 +243,10 @@ async def begin_create_or_update(
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype:
@@ -288,7 +259,7 @@ async def begin_create_or_update(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.SqlMigrationService, IO],
+ parameters: Union[_models.SqlMigrationService, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.SqlMigrationService]:
"""Create or Update Database Migration Service.
@@ -298,20 +269,9 @@ async def begin_create_or_update(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationService or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of SqlMigrationService resource. Is either a SqlMigrationService
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationService or IO[bytes]
:return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype:
@@ -321,9 +281,7 @@ async def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -341,12 +299,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -356,22 +315,20 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.SqlMigrationService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return AsyncLROPoller[_models.SqlMigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -382,39 +339,41 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -427,14 +386,6 @@ async def begin_delete(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -442,15 +393,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
api_version=api_version,
@@ -459,11 +408,12 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -472,26 +422,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
async def _update_initial(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.SqlMigrationServiceUpdate, IO],
+ parameters: Union[_models.SqlMigrationServiceUpdate, IO[bytes]],
**kwargs: Any
- ) -> _models.SqlMigrationService:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -502,21 +448,19 @@ async def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "SqlMigrationServiceUpdate")
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
@@ -524,38 +468,34 @@ async def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
-
@overload
async def begin_update(
self,
@@ -578,14 +518,6 @@ async def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype:
@@ -598,7 +530,7 @@ async def begin_update(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -611,18 +543,10 @@ async def begin_update(
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype:
@@ -635,7 +559,7 @@ async def begin_update(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.SqlMigrationServiceUpdate, IO],
+ parameters: Union[_models.SqlMigrationServiceUpdate, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.SqlMigrationService]:
"""Update Database Migration Service.
@@ -645,20 +569,9 @@ async def begin_update(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationServiceUpdate or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of SqlMigrationService resource. Is either a
+ SqlMigrationServiceUpdate type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationServiceUpdate or IO[bytes]
:return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype:
@@ -668,9 +581,7 @@ async def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -688,12 +599,13 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -703,17 +615,15 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.SqlMigrationService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return AsyncLROPoller[_models.SqlMigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def list_by_resource_group(
@@ -724,7 +634,6 @@ def list_by_resource_group(
:param resource_group_name: Name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SqlMigrationService or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.SqlMigrationService]
@@ -733,12 +642,10 @@ def list_by_resource_group(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.SqlMigrationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -749,16 +656,14 @@ def list_by_resource_group(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -770,13 +675,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("SqlMigrationListResult", pipeline_response)
@@ -786,10 +690,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -801,10 +706,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices"
- }
-
@distributed_trace_async
async def list_auth_keys(
self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any
@@ -816,12 +717,11 @@ async def list_auth_keys(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: AuthenticationKeys or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.AuthenticationKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -832,25 +732,22 @@ async def list_auth_keys(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AuthenticationKeys] = kwargs.pop("cls", None)
- request = build_list_auth_keys_request(
+ _request = build_list_auth_keys_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_auth_keys.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -859,16 +756,12 @@ async def list_auth_keys(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("AuthenticationKeys", pipeline_response)
+ deserialized = self._deserialize("AuthenticationKeys", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- list_auth_keys.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listAuthKeys"
- }
+ return deserialized # type: ignore
@overload
async def regenerate_auth_keys(
@@ -892,7 +785,6 @@ async def regenerate_auth_keys(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: RegenAuthKeys or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
@@ -903,7 +795,7 @@ async def regenerate_auth_keys(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -916,11 +808,10 @@ async def regenerate_auth_keys(
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: RegenAuthKeys or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
@@ -931,7 +822,7 @@ async def regenerate_auth_keys(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.RegenAuthKeys, IO],
+ parameters: Union[_models.RegenAuthKeys, IO[bytes]],
**kwargs: Any
) -> _models.RegenAuthKeys:
"""Regenerate a new set of Authentication Keys for Self Hosted Integration Runtime.
@@ -941,18 +832,14 @@ async def regenerate_auth_keys(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.RegenAuthKeys or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Details of SqlMigrationService resource. Is either a RegenAuthKeys type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.RegenAuthKeys or IO[bytes]
:return: RegenAuthKeys or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -963,21 +850,19 @@ async def regenerate_auth_keys(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.RegenAuthKeys] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "RegenAuthKeys")
- request = build_regenerate_auth_keys_request(
+ _request = build_regenerate_auth_keys_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
@@ -985,15 +870,14 @@ async def regenerate_auth_keys(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.regenerate_auth_keys.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1002,16 +886,12 @@ async def regenerate_auth_keys(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("RegenAuthKeys", pipeline_response)
+ deserialized = self._deserialize("RegenAuthKeys", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- regenerate_auth_keys.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/regenerateAuthKeys"
- }
+ return deserialized # type: ignore
@overload
async def delete_node(
@@ -1035,7 +915,6 @@ async def delete_node(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DeleteNode or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DeleteNode
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1046,7 +925,7 @@ async def delete_node(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1059,11 +938,10 @@ async def delete_node(
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DeleteNode or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DeleteNode
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1074,7 +952,7 @@ async def delete_node(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.DeleteNode, IO],
+ parameters: Union[_models.DeleteNode, IO[bytes]],
**kwargs: Any
) -> _models.DeleteNode:
"""Delete the integration runtime node.
@@ -1084,18 +962,14 @@ async def delete_node(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.DeleteNode or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Details of SqlMigrationService resource. Is either a DeleteNode type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DeleteNode or IO[bytes]
:return: DeleteNode or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DeleteNode
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1106,21 +980,19 @@ async def delete_node(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DeleteNode] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DeleteNode")
- request = build_delete_node_request(
+ _request = build_delete_node_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
@@ -1128,15 +1000,14 @@ async def delete_node(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.delete_node.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1145,16 +1016,12 @@ async def delete_node(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DeleteNode", pipeline_response)
+ deserialized = self._deserialize("DeleteNode", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- delete_node.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/deleteNode"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_migrations(
@@ -1167,7 +1034,6 @@ def list_migrations(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DatabaseMigration or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.DatabaseMigration]
@@ -1176,12 +1042,10 @@ def list_migrations(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatabaseMigrationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1192,17 +1056,15 @@ def list_migrations(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_migrations_request(
+ _request = build_list_migrations_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_migrations.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1214,13 +1076,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("DatabaseMigrationListResult", pipeline_response)
@@ -1230,10 +1091,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1245,10 +1107,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_migrations.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listMigrations"
- }
-
@distributed_trace_async
async def list_monitoring_data(
self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any
@@ -1261,12 +1119,11 @@ async def list_monitoring_data(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: IntegrationRuntimeMonitoringData or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.IntegrationRuntimeMonitoringData
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1277,25 +1134,22 @@ async def list_monitoring_data(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.IntegrationRuntimeMonitoringData] = kwargs.pop("cls", None)
- request = build_list_monitoring_data_request(
+ _request = build_list_monitoring_data_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_monitoring_data.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1304,22 +1158,17 @@ async def list_monitoring_data(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response)
+ deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- list_monitoring_data.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listMonitoringData"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.SqlMigrationService"]:
"""Retrieve all SQL migration services in the subscriptions.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SqlMigrationService or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.SqlMigrationService]
@@ -1328,12 +1177,10 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.SqlMigra
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.SqlMigrationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1344,15 +1191,13 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.SqlMigra
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1364,13 +1209,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("SqlMigrationListResult", pipeline_response)
@@ -1380,10 +1224,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1394,7 +1239,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list_by_subscription.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/sqlMigrationServices"
- }
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_tasks_operations.py
index fbe2cf4e36a3..5dacd66cd535 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_tasks_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_tasks_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,15 +20,13 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._tasks_operations import (
build_cancel_request,
build_command_request,
@@ -39,10 +37,10 @@
build_update_request,
)
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -72,9 +70,10 @@ def list(
) -> AsyncIterable["_models.ProjectTask"]:
"""Get tasks in a service.
- The services resource is the top-level resource that represents the Database Migration Service.
- This method returns a list of tasks owned by a service resource. Some tasks may have a status
- of Unknown, which indicates that an error occurred while querying the status of that task.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This method returns a list of tasks owned by a service resource. Some tasks
+ may have a status of Unknown, which indicates that an error occurred while querying the status
+ of that task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -84,7 +83,6 @@ def list(
:type project_name: str
:param task_type: Filter tasks by task type. Default value is None.
:type task_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProjectTask or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ProjectTask]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -92,12 +90,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.TaskList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -108,19 +104,17 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
subscription_id=self._config.subscription_id,
task_type=task_type,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -132,13 +126,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("TaskList", pipeline_response)
@@ -148,10 +141,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -164,10 +158,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks"
- }
-
@overload
async def create_or_update(
self,
@@ -183,8 +173,9 @@ async def create_or_update(
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PUT method creates a new task or updates an existing one, although since tasks
- have no mutable custom properties, there is little reason to update an existing one.
+ (classic) instance. The PUT method creates a new task or updates an existing one, although
+ since tasks have no mutable custom properties, there is little reason to update an existing
+ one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -199,7 +190,6 @@ async def create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -212,7 +202,7 @@ async def create_or_update(
service_name: str,
project_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -220,8 +210,9 @@ async def create_or_update(
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PUT method creates a new task or updates an existing one, although since tasks
- have no mutable custom properties, there is little reason to update an existing one.
+ (classic) instance. The PUT method creates a new task or updates an existing one, although
+ since tasks have no mutable custom properties, there is little reason to update an existing
+ one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -232,11 +223,10 @@ async def create_or_update(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Information about the task. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -249,14 +239,15 @@ async def create_or_update(
service_name: str,
project_name: str,
task_name: str,
- parameters: Union[_models.ProjectTask, IO],
+ parameters: Union[_models.ProjectTask, IO[bytes]],
**kwargs: Any
) -> _models.ProjectTask:
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PUT method creates a new task or updates an existing one, although since tasks
- have no mutable custom properties, there is little reason to update an existing one.
+ (classic) instance. The PUT method creates a new task or updates an existing one, although
+ since tasks have no mutable custom properties, there is little reason to update an existing
+ one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -266,17 +257,14 @@ async def create_or_update(
:type project_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Information about the task. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes]
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -287,21 +275,19 @@ async def create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectTask")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -311,15 +297,14 @@ async def create_or_update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -329,21 +314,13 @@ async def create_or_update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("ProjectTask", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}"
- }
-
@distributed_trace_async
async def get(
self,
@@ -357,7 +334,7 @@ async def get(
"""Get task information.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The GET method retrieves information about a task.
+ (classic) instance. The GET method retrieves information about a task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -369,12 +346,11 @@ async def get(
:type task_name: str
:param expand: Expand the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -385,12 +361,10 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -398,15 +372,14 @@ async def get(
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -416,19 +389,15 @@ async def get(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self,
group_name: str,
service_name: str,
@@ -440,7 +409,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
"""Delete task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The DELETE method deletes a task, canceling it first if it's running.
+ (classic) instance. The DELETE method deletes a task, canceling it first if it's running.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -453,12 +422,11 @@ async def delete( # pylint: disable=inconsistent-return-statements
:param delete_running_tasks: Delete the resource even if it contains running tasks. Default
value is None.
:type delete_running_tasks: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -469,12 +437,10 @@ async def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -482,15 +448,14 @@ async def delete( # pylint: disable=inconsistent-return-statements
subscription_id=self._config.subscription_id,
delete_running_tasks=delete_running_tasks,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -501,11 +466,7 @@ async def delete( # pylint: disable=inconsistent-return-statements
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}"
- }
+ return cls(pipeline_response, None, {}) # type: ignore
@overload
async def update(
@@ -522,8 +483,8 @@ async def update(
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PATCH method updates an existing task, but since tasks have no mutable custom
- properties, there is little reason to do so.
+ (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable
+ custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -538,7 +499,6 @@ async def update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -551,7 +511,7 @@ async def update(
service_name: str,
project_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -559,8 +519,8 @@ async def update(
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PATCH method updates an existing task, but since tasks have no mutable custom
- properties, there is little reason to do so.
+ (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable
+ custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -571,11 +531,10 @@ async def update(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Information about the task. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -588,14 +547,14 @@ async def update(
service_name: str,
project_name: str,
task_name: str,
- parameters: Union[_models.ProjectTask, IO],
+ parameters: Union[_models.ProjectTask, IO[bytes]],
**kwargs: Any
) -> _models.ProjectTask:
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PATCH method updates an existing task, but since tasks have no mutable custom
- properties, there is little reason to do so.
+ (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable
+ custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -605,17 +564,14 @@ async def update(
:type project_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Information about the task. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes]
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -626,21 +582,19 @@ async def update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectTask")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -650,15 +604,14 @@ async def update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -668,16 +621,12 @@ async def update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def cancel(
@@ -686,7 +635,7 @@ async def cancel(
"""Cancel a task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. This method cancels a task if it's currently queued or running.
+ (classic) instance. This method cancels a task if it's currently queued or running.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -696,12 +645,11 @@ async def cancel(
:type project_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -712,27 +660,24 @@ async def cancel(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
- request = build_cancel_request(
+ _request = build_cancel_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
task_name=task_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.cancel.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -742,16 +687,12 @@ async def cancel(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/cancel"
- }
+ return deserialized # type: ignore
@overload
async def command(
@@ -768,7 +709,7 @@ async def command(
"""Execute a command on a task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. This method executes a command on a running task.
+ (classic) instance. This method executes a command on a running task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -783,7 +724,6 @@ async def command(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CommandProperties or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.CommandProperties
:raises ~azure.core.exceptions.HttpResponseError:
@@ -796,7 +736,7 @@ async def command(
service_name: str,
project_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -804,7 +744,7 @@ async def command(
"""Execute a command on a task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. This method executes a command on a running task.
+ (classic) instance. This method executes a command on a running task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -815,11 +755,10 @@ async def command(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Command to execute. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CommandProperties or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.CommandProperties
:raises ~azure.core.exceptions.HttpResponseError:
@@ -832,13 +771,13 @@ async def command(
service_name: str,
project_name: str,
task_name: str,
- parameters: Union[_models.CommandProperties, IO],
+ parameters: Union[_models.CommandProperties, IO[bytes]],
**kwargs: Any
) -> _models.CommandProperties:
"""Execute a command on a task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. This method executes a command on a running task.
+ (classic) instance. This method executes a command on a running task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -848,17 +787,14 @@ async def command(
:type project_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Command to execute. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.CommandProperties or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Command to execute. Is either a CommandProperties type or a IO[bytes] type.
+ Required.
+ :type parameters: ~azure.mgmt.datamigration.models.CommandProperties or IO[bytes]
:return: CommandProperties or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.CommandProperties
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -869,21 +805,19 @@ async def command(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.CommandProperties] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "CommandProperties")
- request = build_command_request(
+ _request = build_command_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -893,15 +827,14 @@ async def command(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.command.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -911,13 +844,9 @@ async def command(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("CommandProperties", pipeline_response)
+ deserialized = self._deserialize("CommandProperties", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- command.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/command"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_usages_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_usages_operations.py
index d652bf441d62..62be4fc8419e 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_usages_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_usages_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,20 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._usages_operations import build_list_request
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -61,12 +58,11 @@ def __init__(self, *args, **kwargs) -> None:
def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.Quota"]:
"""Get resource quotas and usage information.
- This method returns region-specific quotas and resource usage information for the Database
- Migration Service.
+ This method returns region-specific quotas and resource usage information for the Azure
+ Database Migration Service (classic).
:param location: The Azure region of the operation. Required.
:type location: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Quota or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.Quota]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -74,12 +70,10 @@ def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.Quota"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.QuotaList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -90,16 +84,14 @@ def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.Quota"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
location=location,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -111,13 +103,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("QuotaList", pipeline_response)
@@ -127,10 +118,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -142,7 +134,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/usages"
- }
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/__init__.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/__init__.py
index 3a7960413618..95cfe7489461 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/__init__.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/__init__.py
@@ -5,381 +5,417 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._models_py3 import ApiError
-from ._models_py3 import AuthenticationKeys
-from ._models_py3 import AvailableServiceSku
-from ._models_py3 import AvailableServiceSkuCapacity
-from ._models_py3 import AvailableServiceSkuSku
-from ._models_py3 import AzureActiveDirectoryApp
-from ._models_py3 import AzureBlob
-from ._models_py3 import BackupConfiguration
-from ._models_py3 import BackupFileInfo
-from ._models_py3 import BackupSetInfo
-from ._models_py3 import BlobShare
-from ._models_py3 import CheckOCIDriverTaskInput
-from ._models_py3 import CheckOCIDriverTaskOutput
-from ._models_py3 import CheckOCIDriverTaskProperties
-from ._models_py3 import CommandProperties
-from ._models_py3 import ConnectToMongoDbTaskProperties
-from ._models_py3 import ConnectToSourceMySqlTaskInput
-from ._models_py3 import ConnectToSourceMySqlTaskProperties
-from ._models_py3 import ConnectToSourceNonSqlTaskOutput
-from ._models_py3 import ConnectToSourceOracleSyncTaskInput
-from ._models_py3 import ConnectToSourceOracleSyncTaskOutput
-from ._models_py3 import ConnectToSourceOracleSyncTaskProperties
-from ._models_py3 import ConnectToSourcePostgreSqlSyncTaskInput
-from ._models_py3 import ConnectToSourcePostgreSqlSyncTaskOutput
-from ._models_py3 import ConnectToSourcePostgreSqlSyncTaskProperties
-from ._models_py3 import ConnectToSourceSqlServerSyncTaskProperties
-from ._models_py3 import ConnectToSourceSqlServerTaskInput
-from ._models_py3 import ConnectToSourceSqlServerTaskOutput
-from ._models_py3 import ConnectToSourceSqlServerTaskOutputAgentJobLevel
-from ._models_py3 import ConnectToSourceSqlServerTaskOutputDatabaseLevel
-from ._models_py3 import ConnectToSourceSqlServerTaskOutputLoginLevel
-from ._models_py3 import ConnectToSourceSqlServerTaskOutputTaskLevel
-from ._models_py3 import ConnectToSourceSqlServerTaskProperties
-from ._models_py3 import ConnectToTargetAzureDbForMySqlTaskInput
-from ._models_py3 import ConnectToTargetAzureDbForMySqlTaskOutput
-from ._models_py3 import ConnectToTargetAzureDbForMySqlTaskProperties
-from ._models_py3 import ConnectToTargetAzureDbForPostgreSqlSyncTaskInput
-from ._models_py3 import ConnectToTargetAzureDbForPostgreSqlSyncTaskOutput
-from ._models_py3 import ConnectToTargetAzureDbForPostgreSqlSyncTaskProperties
-from ._models_py3 import ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput
-from ._models_py3 import ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutput
-from ._models_py3 import ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem
-from ._models_py3 import ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskProperties
-from ._models_py3 import ConnectToTargetSqlDbSyncTaskInput
-from ._models_py3 import ConnectToTargetSqlDbSyncTaskProperties
-from ._models_py3 import ConnectToTargetSqlDbTaskInput
-from ._models_py3 import ConnectToTargetSqlDbTaskOutput
-from ._models_py3 import ConnectToTargetSqlDbTaskProperties
-from ._models_py3 import ConnectToTargetSqlMISyncTaskInput
-from ._models_py3 import ConnectToTargetSqlMISyncTaskOutput
-from ._models_py3 import ConnectToTargetSqlMISyncTaskProperties
-from ._models_py3 import ConnectToTargetSqlMITaskInput
-from ._models_py3 import ConnectToTargetSqlMITaskOutput
-from ._models_py3 import ConnectToTargetSqlMITaskProperties
-from ._models_py3 import ConnectionInfo
-from ._models_py3 import CopyProgressDetails
-from ._models_py3 import DataIntegrityValidationResult
-from ._models_py3 import DataItemMigrationSummaryResult
-from ._models_py3 import DataMigrationError
-from ._models_py3 import DataMigrationProjectMetadata
-from ._models_py3 import DataMigrationService
-from ._models_py3 import DataMigrationServiceList
-from ._models_py3 import DataMigrationServiceStatusResponse
-from ._models_py3 import Database
-from ._models_py3 import DatabaseBackupInfo
-from ._models_py3 import DatabaseFileInfo
-from ._models_py3 import DatabaseFileInput
-from ._models_py3 import DatabaseInfo
-from ._models_py3 import DatabaseMigration
-from ._models_py3 import DatabaseMigrationListResult
-from ._models_py3 import DatabaseMigrationProperties
-from ._models_py3 import DatabaseMigrationPropertiesSqlDb
-from ._models_py3 import DatabaseMigrationPropertiesSqlMi
-from ._models_py3 import DatabaseMigrationPropertiesSqlVm
-from ._models_py3 import DatabaseMigrationSqlDb
-from ._models_py3 import DatabaseMigrationSqlMi
-from ._models_py3 import DatabaseMigrationSqlVm
-from ._models_py3 import DatabaseObjectName
-from ._models_py3 import DatabaseSummaryResult
-from ._models_py3 import DatabaseTable
-from ._models_py3 import DeleteNode
-from ._models_py3 import ErrorInfo
-from ._models_py3 import ExecutionStatistics
-from ._models_py3 import FileList
-from ._models_py3 import FileShare
-from ._models_py3 import FileStorageInfo
-from ._models_py3 import GetProjectDetailsNonSqlTaskInput
-from ._models_py3 import GetTdeCertificatesSqlTaskInput
-from ._models_py3 import GetTdeCertificatesSqlTaskOutput
-from ._models_py3 import GetTdeCertificatesSqlTaskProperties
-from ._models_py3 import GetUserTablesMySqlTaskInput
-from ._models_py3 import GetUserTablesMySqlTaskOutput
-from ._models_py3 import GetUserTablesMySqlTaskProperties
-from ._models_py3 import GetUserTablesOracleTaskInput
-from ._models_py3 import GetUserTablesOracleTaskOutput
-from ._models_py3 import GetUserTablesOracleTaskProperties
-from ._models_py3 import GetUserTablesPostgreSqlTaskInput
-from ._models_py3 import GetUserTablesPostgreSqlTaskOutput
-from ._models_py3 import GetUserTablesPostgreSqlTaskProperties
-from ._models_py3 import GetUserTablesSqlSyncTaskInput
-from ._models_py3 import GetUserTablesSqlSyncTaskOutput
-from ._models_py3 import GetUserTablesSqlSyncTaskProperties
-from ._models_py3 import GetUserTablesSqlTaskInput
-from ._models_py3 import GetUserTablesSqlTaskOutput
-from ._models_py3 import GetUserTablesSqlTaskProperties
-from ._models_py3 import InstallOCIDriverTaskInput
-from ._models_py3 import InstallOCIDriverTaskOutput
-from ._models_py3 import InstallOCIDriverTaskProperties
-from ._models_py3 import IntegrationRuntimeMonitoringData
-from ._models_py3 import MiSqlConnectionInfo
-from ._models_py3 import MigrateMISyncCompleteCommandInput
-from ._models_py3 import MigrateMISyncCompleteCommandOutput
-from ._models_py3 import MigrateMISyncCompleteCommandProperties
-from ._models_py3 import MigrateMongoDbTaskProperties
-from ._models_py3 import MigrateMySqlAzureDbForMySqlOfflineDatabaseInput
-from ._models_py3 import MigrateMySqlAzureDbForMySqlOfflineTaskInput
-from ._models_py3 import MigrateMySqlAzureDbForMySqlOfflineTaskOutput
-from ._models_py3 import MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel
-from ._models_py3 import MigrateMySqlAzureDbForMySqlOfflineTaskOutputError
-from ._models_py3 import MigrateMySqlAzureDbForMySqlOfflineTaskOutputMigrationLevel
-from ._models_py3 import MigrateMySqlAzureDbForMySqlOfflineTaskOutputTableLevel
-from ._models_py3 import MigrateMySqlAzureDbForMySqlOfflineTaskProperties
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncDatabaseInput
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncTaskInput
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncTaskOutput
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevel
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncTaskOutputError
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevel
-from ._models_py3 import MigrateMySqlAzureDbForMySqlSyncTaskProperties
-from ._models_py3 import MigrateOracleAzureDbForPostgreSqlSyncTaskProperties
-from ._models_py3 import MigrateOracleAzureDbPostgreSqlSyncDatabaseInput
-from ._models_py3 import MigrateOracleAzureDbPostgreSqlSyncTaskInput
-from ._models_py3 import MigrateOracleAzureDbPostgreSqlSyncTaskOutput
-from ._models_py3 import MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError
-from ._models_py3 import MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseLevel
-from ._models_py3 import MigrateOracleAzureDbPostgreSqlSyncTaskOutputError
-from ._models_py3 import MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel
-from ._models_py3 import MigrateOracleAzureDbPostgreSqlSyncTaskOutputTableLevel
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInput
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInput
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseError
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevel
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevel
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevel
-from ._models_py3 import MigratePostgreSqlAzureDbForPostgreSqlSyncTaskProperties
-from ._models_py3 import MigrateSchemaSqlServerSqlDbDatabaseInput
-from ._models_py3 import MigrateSchemaSqlServerSqlDbTaskInput
-from ._models_py3 import MigrateSchemaSqlServerSqlDbTaskOutput
-from ._models_py3 import MigrateSchemaSqlServerSqlDbTaskOutputDatabaseLevel
-from ._models_py3 import MigrateSchemaSqlServerSqlDbTaskOutputError
-from ._models_py3 import MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel
-from ._models_py3 import MigrateSchemaSqlServerSqlDbTaskProperties
-from ._models_py3 import MigrateSchemaSqlTaskOutputError
-from ._models_py3 import MigrateSqlServerDatabaseInput
-from ._models_py3 import MigrateSqlServerSqlDbDatabaseInput
-from ._models_py3 import MigrateSqlServerSqlDbSyncDatabaseInput
-from ._models_py3 import MigrateSqlServerSqlDbSyncTaskInput
-from ._models_py3 import MigrateSqlServerSqlDbSyncTaskOutput
-from ._models_py3 import MigrateSqlServerSqlDbSyncTaskOutputDatabaseError
-from ._models_py3 import MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevel
-from ._models_py3 import MigrateSqlServerSqlDbSyncTaskOutputError
-from ._models_py3 import MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel
-from ._models_py3 import MigrateSqlServerSqlDbSyncTaskOutputTableLevel
-from ._models_py3 import MigrateSqlServerSqlDbSyncTaskProperties
-from ._models_py3 import MigrateSqlServerSqlDbTaskInput
-from ._models_py3 import MigrateSqlServerSqlDbTaskOutput
-from ._models_py3 import MigrateSqlServerSqlDbTaskOutputDatabaseLevel
-from ._models_py3 import MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResult
-from ._models_py3 import MigrateSqlServerSqlDbTaskOutputError
-from ._models_py3 import MigrateSqlServerSqlDbTaskOutputMigrationLevel
-from ._models_py3 import MigrateSqlServerSqlDbTaskOutputTableLevel
-from ._models_py3 import MigrateSqlServerSqlDbTaskOutputValidationResult
-from ._models_py3 import MigrateSqlServerSqlDbTaskProperties
-from ._models_py3 import MigrateSqlServerSqlMIDatabaseInput
-from ._models_py3 import MigrateSqlServerSqlMISyncTaskInput
-from ._models_py3 import MigrateSqlServerSqlMISyncTaskOutput
-from ._models_py3 import MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel
-from ._models_py3 import MigrateSqlServerSqlMISyncTaskOutputError
-from ._models_py3 import MigrateSqlServerSqlMISyncTaskOutputMigrationLevel
-from ._models_py3 import MigrateSqlServerSqlMISyncTaskProperties
-from ._models_py3 import MigrateSqlServerSqlMITaskInput
-from ._models_py3 import MigrateSqlServerSqlMITaskOutput
-from ._models_py3 import MigrateSqlServerSqlMITaskOutputAgentJobLevel
-from ._models_py3 import MigrateSqlServerSqlMITaskOutputDatabaseLevel
-from ._models_py3 import MigrateSqlServerSqlMITaskOutputError
-from ._models_py3 import MigrateSqlServerSqlMITaskOutputLoginLevel
-from ._models_py3 import MigrateSqlServerSqlMITaskOutputMigrationLevel
-from ._models_py3 import MigrateSqlServerSqlMITaskProperties
-from ._models_py3 import MigrateSsisTaskInput
-from ._models_py3 import MigrateSsisTaskOutput
-from ._models_py3 import MigrateSsisTaskOutputMigrationLevel
-from ._models_py3 import MigrateSsisTaskOutputProjectLevel
-from ._models_py3 import MigrateSsisTaskProperties
-from ._models_py3 import MigrateSyncCompleteCommandInput
-from ._models_py3 import MigrateSyncCompleteCommandOutput
-from ._models_py3 import MigrateSyncCompleteCommandProperties
-from ._models_py3 import MigrationEligibilityInfo
-from ._models_py3 import MigrationOperationInput
-from ._models_py3 import MigrationReportResult
-from ._models_py3 import MigrationStatusDetails
-from ._models_py3 import MigrationTableMetadata
-from ._models_py3 import MigrationValidationDatabaseLevelResult
-from ._models_py3 import MigrationValidationDatabaseSummaryResult
-from ._models_py3 import MigrationValidationOptions
-from ._models_py3 import MigrationValidationResult
-from ._models_py3 import MongoDbCancelCommand
-from ._models_py3 import MongoDbClusterInfo
-from ._models_py3 import MongoDbCollectionInfo
-from ._models_py3 import MongoDbCollectionProgress
-from ._models_py3 import MongoDbCollectionSettings
-from ._models_py3 import MongoDbCommandInput
-from ._models_py3 import MongoDbConnectionInfo
-from ._models_py3 import MongoDbDatabaseInfo
-from ._models_py3 import MongoDbDatabaseProgress
-from ._models_py3 import MongoDbDatabaseSettings
-from ._models_py3 import MongoDbError
-from ._models_py3 import MongoDbFinishCommand
-from ._models_py3 import MongoDbFinishCommandInput
-from ._models_py3 import MongoDbMigrationProgress
-from ._models_py3 import MongoDbMigrationSettings
-from ._models_py3 import MongoDbObjectInfo
-from ._models_py3 import MongoDbProgress
-from ._models_py3 import MongoDbRestartCommand
-from ._models_py3 import MongoDbShardKeyField
-from ._models_py3 import MongoDbShardKeyInfo
-from ._models_py3 import MongoDbShardKeySetting
-from ._models_py3 import MongoDbThrottlingSettings
-from ._models_py3 import MySqlConnectionInfo
-from ._models_py3 import NameAvailabilityRequest
-from ._models_py3 import NameAvailabilityResponse
-from ._models_py3 import NodeMonitoringData
-from ._models_py3 import NonSqlDataMigrationTable
-from ._models_py3 import NonSqlDataMigrationTableResult
-from ._models_py3 import NonSqlMigrationTaskInput
-from ._models_py3 import NonSqlMigrationTaskOutput
-from ._models_py3 import ODataError
-from ._models_py3 import OfflineConfiguration
-from ._models_py3 import OperationListResult
-from ._models_py3 import OperationsDefinition
-from ._models_py3 import OperationsDisplayDefinition
-from ._models_py3 import OracleConnectionInfo
-from ._models_py3 import OracleOCIDriverInfo
-from ._models_py3 import OrphanedUserInfo
-from ._models_py3 import PostgreSqlConnectionInfo
-from ._models_py3 import Project
-from ._models_py3 import ProjectFile
-from ._models_py3 import ProjectFileProperties
-from ._models_py3 import ProjectList
-from ._models_py3 import ProjectTask
-from ._models_py3 import ProjectTaskProperties
-from ._models_py3 import ProxyResource
-from ._models_py3 import QueryAnalysisValidationResult
-from ._models_py3 import QueryExecutionResult
-from ._models_py3 import Quota
-from ._models_py3 import QuotaList
-from ._models_py3 import QuotaName
-from ._models_py3 import RegenAuthKeys
-from ._models_py3 import ReportableException
-from ._models_py3 import Resource
-from ._models_py3 import ResourceSku
-from ._models_py3 import ResourceSkuCapabilities
-from ._models_py3 import ResourceSkuCapacity
-from ._models_py3 import ResourceSkuCosts
-from ._models_py3 import ResourceSkuRestrictions
-from ._models_py3 import ResourceSkusResult
-from ._models_py3 import SchemaComparisonValidationResult
-from ._models_py3 import SchemaComparisonValidationResultType
-from ._models_py3 import SchemaMigrationSetting
-from ._models_py3 import SelectedCertificateInput
-from ._models_py3 import ServerProperties
-from ._models_py3 import ServiceOperation
-from ._models_py3 import ServiceOperationDisplay
-from ._models_py3 import ServiceOperationList
-from ._models_py3 import ServiceSku
-from ._models_py3 import ServiceSkuList
-from ._models_py3 import SourceLocation
-from ._models_py3 import SqlBackupFileInfo
-from ._models_py3 import SqlBackupSetInfo
-from ._models_py3 import SqlConnectionInfo
-from ._models_py3 import SqlConnectionInformation
-from ._models_py3 import SqlDbMigrationStatusDetails
-from ._models_py3 import SqlDbOfflineConfiguration
-from ._models_py3 import SqlFileShare
-from ._models_py3 import SqlMigrationListResult
-from ._models_py3 import SqlMigrationService
-from ._models_py3 import SqlMigrationServiceUpdate
-from ._models_py3 import SqlMigrationTaskInput
-from ._models_py3 import SqlServerSqlMISyncTaskInput
-from ._models_py3 import SsisMigrationInfo
-from ._models_py3 import StartMigrationScenarioServerRoleResult
-from ._models_py3 import SyncMigrationDatabaseErrorEvent
-from ._models_py3 import SystemData
-from ._models_py3 import TargetLocation
-from ._models_py3 import TaskList
-from ._models_py3 import TrackedResource
-from ._models_py3 import UploadOCIDriverTaskInput
-from ._models_py3 import UploadOCIDriverTaskOutput
-from ._models_py3 import UploadOCIDriverTaskProperties
-from ._models_py3 import ValidateMigrationInputSqlServerSqlDbSyncTaskProperties
-from ._models_py3 import ValidateMigrationInputSqlServerSqlMISyncTaskInput
-from ._models_py3 import ValidateMigrationInputSqlServerSqlMISyncTaskOutput
-from ._models_py3 import ValidateMigrationInputSqlServerSqlMISyncTaskProperties
-from ._models_py3 import ValidateMigrationInputSqlServerSqlMITaskInput
-from ._models_py3 import ValidateMigrationInputSqlServerSqlMITaskOutput
-from ._models_py3 import ValidateMigrationInputSqlServerSqlMITaskProperties
-from ._models_py3 import ValidateMongoDbTaskProperties
-from ._models_py3 import ValidateOracleAzureDbForPostgreSqlSyncTaskProperties
-from ._models_py3 import ValidateOracleAzureDbPostgreSqlSyncTaskOutput
-from ._models_py3 import ValidateSyncMigrationInputSqlServerTaskInput
-from ._models_py3 import ValidateSyncMigrationInputSqlServerTaskOutput
-from ._models_py3 import ValidationError
-from ._models_py3 import WaitStatistics
+from typing import TYPE_CHECKING
-from ._data_migration_management_client_enums import AuthenticationType
-from ._data_migration_management_client_enums import BackupFileStatus
-from ._data_migration_management_client_enums import BackupMode
-from ._data_migration_management_client_enums import BackupType
-from ._data_migration_management_client_enums import CommandState
-from ._data_migration_management_client_enums import CommandType
-from ._data_migration_management_client_enums import CreatedByType
-from ._data_migration_management_client_enums import DataMigrationResultCode
-from ._data_migration_management_client_enums import DatabaseCompatLevel
-from ._data_migration_management_client_enums import DatabaseFileType
-from ._data_migration_management_client_enums import DatabaseMigrationStage
-from ._data_migration_management_client_enums import DatabaseMigrationState
-from ._data_migration_management_client_enums import DatabaseState
-from ._data_migration_management_client_enums import ErrorType
-from ._data_migration_management_client_enums import LoginMigrationStage
-from ._data_migration_management_client_enums import LoginType
-from ._data_migration_management_client_enums import MigrationState
-from ._data_migration_management_client_enums import MigrationStatus
-from ._data_migration_management_client_enums import MongoDbClusterType
-from ._data_migration_management_client_enums import MongoDbErrorType
-from ._data_migration_management_client_enums import MongoDbMigrationState
-from ._data_migration_management_client_enums import MongoDbProgressResultType
-from ._data_migration_management_client_enums import MongoDbReplication
-from ._data_migration_management_client_enums import MongoDbShardKeyOrder
-from ._data_migration_management_client_enums import MySqlTargetPlatformType
-from ._data_migration_management_client_enums import NameCheckFailureReason
-from ._data_migration_management_client_enums import ObjectType
-from ._data_migration_management_client_enums import OperationOrigin
-from ._data_migration_management_client_enums import ProjectProvisioningState
-from ._data_migration_management_client_enums import ProjectSourcePlatform
-from ._data_migration_management_client_enums import ProjectTargetPlatform
-from ._data_migration_management_client_enums import ReplicateMigrationState
-from ._data_migration_management_client_enums import ResourceSkuCapacityScaleType
-from ._data_migration_management_client_enums import ResourceSkuRestrictionsReasonCode
-from ._data_migration_management_client_enums import ResourceSkuRestrictionsType
-from ._data_migration_management_client_enums import ResourceType
-from ._data_migration_management_client_enums import ScenarioSource
-from ._data_migration_management_client_enums import ScenarioTarget
-from ._data_migration_management_client_enums import SchemaMigrationOption
-from ._data_migration_management_client_enums import SchemaMigrationStage
-from ._data_migration_management_client_enums import ServerLevelPermissionsGroup
-from ._data_migration_management_client_enums import ServiceProvisioningState
-from ._data_migration_management_client_enums import ServiceScalability
-from ._data_migration_management_client_enums import Severity
-from ._data_migration_management_client_enums import SqlSourcePlatform
-from ._data_migration_management_client_enums import SsisMigrationOverwriteOption
-from ._data_migration_management_client_enums import SsisMigrationStage
-from ._data_migration_management_client_enums import SsisStoreType
-from ._data_migration_management_client_enums import SyncDatabaseMigrationReportingState
-from ._data_migration_management_client_enums import SyncTableMigrationState
-from ._data_migration_management_client_enums import TaskState
-from ._data_migration_management_client_enums import TaskType
-from ._data_migration_management_client_enums import UpdateActionType
-from ._data_migration_management_client_enums import ValidationStatus
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models_py3 import ( # type: ignore
+ ApiError,
+ AuthenticationKeys,
+ AvailableServiceSku,
+ AvailableServiceSkuCapacity,
+ AvailableServiceSkuSku,
+ AzureActiveDirectoryApp,
+ AzureBlob,
+ BackupConfiguration,
+ BackupFileInfo,
+ BackupSetInfo,
+ BlobShare,
+ CheckOCIDriverTaskInput,
+ CheckOCIDriverTaskOutput,
+ CheckOCIDriverTaskProperties,
+ CommandProperties,
+ ConnectToMongoDbTaskProperties,
+ ConnectToSourceMySqlTaskInput,
+ ConnectToSourceMySqlTaskProperties,
+ ConnectToSourceNonSqlTaskOutput,
+ ConnectToSourceOracleSyncTaskInput,
+ ConnectToSourceOracleSyncTaskOutput,
+ ConnectToSourceOracleSyncTaskProperties,
+ ConnectToSourcePostgreSqlSyncTaskInput,
+ ConnectToSourcePostgreSqlSyncTaskOutput,
+ ConnectToSourcePostgreSqlSyncTaskProperties,
+ ConnectToSourceSqlServerSyncTaskProperties,
+ ConnectToSourceSqlServerTaskInput,
+ ConnectToSourceSqlServerTaskOutput,
+ ConnectToSourceSqlServerTaskOutputAgentJobLevel,
+ ConnectToSourceSqlServerTaskOutputDatabaseLevel,
+ ConnectToSourceSqlServerTaskOutputLoginLevel,
+ ConnectToSourceSqlServerTaskOutputTaskLevel,
+ ConnectToSourceSqlServerTaskProperties,
+ ConnectToTargetAzureDbForMySqlTaskInput,
+ ConnectToTargetAzureDbForMySqlTaskOutput,
+ ConnectToTargetAzureDbForMySqlTaskProperties,
+ ConnectToTargetAzureDbForPostgreSqlSyncTaskInput,
+ ConnectToTargetAzureDbForPostgreSqlSyncTaskOutput,
+ ConnectToTargetAzureDbForPostgreSqlSyncTaskProperties,
+ ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput,
+ ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutput,
+ ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem,
+ ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskProperties,
+ ConnectToTargetSqlDbSyncTaskInput,
+ ConnectToTargetSqlDbSyncTaskProperties,
+ ConnectToTargetSqlDbTaskInput,
+ ConnectToTargetSqlDbTaskOutput,
+ ConnectToTargetSqlDbTaskProperties,
+ ConnectToTargetSqlMISyncTaskInput,
+ ConnectToTargetSqlMISyncTaskOutput,
+ ConnectToTargetSqlMISyncTaskProperties,
+ ConnectToTargetSqlMITaskInput,
+ ConnectToTargetSqlMITaskOutput,
+ ConnectToTargetSqlMITaskProperties,
+ ConnectionInfo,
+ CopyProgressDetails,
+ DataIntegrityValidationResult,
+ DataItemMigrationSummaryResult,
+ DataMigrationError,
+ DataMigrationProjectMetadata,
+ DataMigrationService,
+ DataMigrationServiceList,
+ DataMigrationServiceStatusResponse,
+ Database,
+ DatabaseBackupInfo,
+ DatabaseFileInfo,
+ DatabaseFileInput,
+ DatabaseInfo,
+ DatabaseMigration,
+ DatabaseMigrationBase,
+ DatabaseMigrationBaseListResult,
+ DatabaseMigrationBaseProperties,
+ DatabaseMigrationCosmosDbMongo,
+ DatabaseMigrationCosmosDbMongoListResult,
+ DatabaseMigrationListResult,
+ DatabaseMigrationProperties,
+ DatabaseMigrationPropertiesCosmosDbMongo,
+ DatabaseMigrationPropertiesSqlDb,
+ DatabaseMigrationPropertiesSqlMi,
+ DatabaseMigrationPropertiesSqlVm,
+ DatabaseMigrationSqlDb,
+ DatabaseMigrationSqlMi,
+ DatabaseMigrationSqlVm,
+ DatabaseObjectName,
+ DatabaseSummaryResult,
+ DatabaseTable,
+ DeleteNode,
+ ErrorAdditionalInfo,
+ ErrorDetail,
+ ErrorInfo,
+ ErrorResponse,
+ ExecutionStatistics,
+ FileList,
+ FileShare,
+ FileStorageInfo,
+ GetProjectDetailsNonSqlTaskInput,
+ GetTdeCertificatesSqlTaskInput,
+ GetTdeCertificatesSqlTaskOutput,
+ GetTdeCertificatesSqlTaskProperties,
+ GetUserTablesMySqlTaskInput,
+ GetUserTablesMySqlTaskOutput,
+ GetUserTablesMySqlTaskProperties,
+ GetUserTablesOracleTaskInput,
+ GetUserTablesOracleTaskOutput,
+ GetUserTablesOracleTaskProperties,
+ GetUserTablesPostgreSqlTaskInput,
+ GetUserTablesPostgreSqlTaskOutput,
+ GetUserTablesPostgreSqlTaskProperties,
+ GetUserTablesSqlSyncTaskInput,
+ GetUserTablesSqlSyncTaskOutput,
+ GetUserTablesSqlSyncTaskProperties,
+ GetUserTablesSqlTaskInput,
+ GetUserTablesSqlTaskOutput,
+ GetUserTablesSqlTaskProperties,
+ InstallOCIDriverTaskInput,
+ InstallOCIDriverTaskOutput,
+ InstallOCIDriverTaskProperties,
+ IntegrationRuntimeMonitoringData,
+ ManagedServiceIdentity,
+ MiSqlConnectionInfo,
+ MigrateMISyncCompleteCommandInput,
+ MigrateMISyncCompleteCommandOutput,
+ MigrateMISyncCompleteCommandProperties,
+ MigrateMongoDbTaskProperties,
+ MigrateMySqlAzureDbForMySqlOfflineDatabaseInput,
+ MigrateMySqlAzureDbForMySqlOfflineTaskInput,
+ MigrateMySqlAzureDbForMySqlOfflineTaskOutput,
+ MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel,
+ MigrateMySqlAzureDbForMySqlOfflineTaskOutputError,
+ MigrateMySqlAzureDbForMySqlOfflineTaskOutputMigrationLevel,
+ MigrateMySqlAzureDbForMySqlOfflineTaskOutputTableLevel,
+ MigrateMySqlAzureDbForMySqlOfflineTaskProperties,
+ MigrateMySqlAzureDbForMySqlSyncDatabaseInput,
+ MigrateMySqlAzureDbForMySqlSyncTaskInput,
+ MigrateMySqlAzureDbForMySqlSyncTaskOutput,
+ MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError,
+ MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevel,
+ MigrateMySqlAzureDbForMySqlSyncTaskOutputError,
+ MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel,
+ MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevel,
+ MigrateMySqlAzureDbForMySqlSyncTaskProperties,
+ MigrateOracleAzureDbForPostgreSqlSyncTaskProperties,
+ MigrateOracleAzureDbPostgreSqlSyncDatabaseInput,
+ MigrateOracleAzureDbPostgreSqlSyncTaskInput,
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutput,
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError,
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseLevel,
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutputError,
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel,
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutputTableLevel,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInput,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInput,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseError,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevel,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevel,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevel,
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskProperties,
+ MigrateSchemaSqlServerSqlDbDatabaseInput,
+ MigrateSchemaSqlServerSqlDbTaskInput,
+ MigrateSchemaSqlServerSqlDbTaskOutput,
+ MigrateSchemaSqlServerSqlDbTaskOutputDatabaseLevel,
+ MigrateSchemaSqlServerSqlDbTaskOutputError,
+ MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel,
+ MigrateSchemaSqlServerSqlDbTaskProperties,
+ MigrateSchemaSqlTaskOutputError,
+ MigrateSqlServerDatabaseInput,
+ MigrateSqlServerSqlDbDatabaseInput,
+ MigrateSqlServerSqlDbSyncDatabaseInput,
+ MigrateSqlServerSqlDbSyncTaskInput,
+ MigrateSqlServerSqlDbSyncTaskOutput,
+ MigrateSqlServerSqlDbSyncTaskOutputDatabaseError,
+ MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevel,
+ MigrateSqlServerSqlDbSyncTaskOutputError,
+ MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel,
+ MigrateSqlServerSqlDbSyncTaskOutputTableLevel,
+ MigrateSqlServerSqlDbSyncTaskProperties,
+ MigrateSqlServerSqlDbTaskInput,
+ MigrateSqlServerSqlDbTaskOutput,
+ MigrateSqlServerSqlDbTaskOutputDatabaseLevel,
+ MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResult,
+ MigrateSqlServerSqlDbTaskOutputError,
+ MigrateSqlServerSqlDbTaskOutputMigrationLevel,
+ MigrateSqlServerSqlDbTaskOutputTableLevel,
+ MigrateSqlServerSqlDbTaskOutputValidationResult,
+ MigrateSqlServerSqlDbTaskProperties,
+ MigrateSqlServerSqlMIDatabaseInput,
+ MigrateSqlServerSqlMISyncTaskInput,
+ MigrateSqlServerSqlMISyncTaskOutput,
+ MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel,
+ MigrateSqlServerSqlMISyncTaskOutputError,
+ MigrateSqlServerSqlMISyncTaskOutputMigrationLevel,
+ MigrateSqlServerSqlMISyncTaskProperties,
+ MigrateSqlServerSqlMITaskInput,
+ MigrateSqlServerSqlMITaskOutput,
+ MigrateSqlServerSqlMITaskOutputAgentJobLevel,
+ MigrateSqlServerSqlMITaskOutputDatabaseLevel,
+ MigrateSqlServerSqlMITaskOutputError,
+ MigrateSqlServerSqlMITaskOutputLoginLevel,
+ MigrateSqlServerSqlMITaskOutputMigrationLevel,
+ MigrateSqlServerSqlMITaskProperties,
+ MigrateSsisTaskInput,
+ MigrateSsisTaskOutput,
+ MigrateSsisTaskOutputMigrationLevel,
+ MigrateSsisTaskOutputProjectLevel,
+ MigrateSsisTaskProperties,
+ MigrateSyncCompleteCommandInput,
+ MigrateSyncCompleteCommandOutput,
+ MigrateSyncCompleteCommandProperties,
+ MigrationEligibilityInfo,
+ MigrationOperationInput,
+ MigrationReportResult,
+ MigrationService,
+ MigrationServiceListResult,
+ MigrationServiceUpdate,
+ MigrationStatusDetails,
+ MigrationTableMetadata,
+ MigrationValidationDatabaseLevelResult,
+ MigrationValidationDatabaseSummaryResult,
+ MigrationValidationOptions,
+ MigrationValidationResult,
+ MongoConnectionInformation,
+ MongoDbCancelCommand,
+ MongoDbClusterInfo,
+ MongoDbCollectionInfo,
+ MongoDbCollectionProgress,
+ MongoDbCollectionSettings,
+ MongoDbCommandInput,
+ MongoDbConnectionInfo,
+ MongoDbDatabaseInfo,
+ MongoDbDatabaseProgress,
+ MongoDbDatabaseSettings,
+ MongoDbError,
+ MongoDbFinishCommand,
+ MongoDbFinishCommandInput,
+ MongoDbMigrationProgress,
+ MongoDbMigrationSettings,
+ MongoDbObjectInfo,
+ MongoDbProgress,
+ MongoDbRestartCommand,
+ MongoDbShardKeyField,
+ MongoDbShardKeyInfo,
+ MongoDbShardKeySetting,
+ MongoDbThrottlingSettings,
+ MongoMigrationCollection,
+ MongoMigrationProgressDetails,
+ MySqlConnectionInfo,
+ NameAvailabilityRequest,
+ NameAvailabilityResponse,
+ NodeMonitoringData,
+ NonSqlDataMigrationTable,
+ NonSqlDataMigrationTableResult,
+ NonSqlMigrationTaskInput,
+ NonSqlMigrationTaskOutput,
+ ODataError,
+ OfflineConfiguration,
+ OperationListResult,
+ OperationsDefinition,
+ OperationsDisplayDefinition,
+ OracleConnectionInfo,
+ OracleOCIDriverInfo,
+ OrphanedUserInfo,
+ PostgreSqlConnectionInfo,
+ Project,
+ ProjectFile,
+ ProjectFileProperties,
+ ProjectList,
+ ProjectTask,
+ ProjectTaskProperties,
+ ProxyResource,
+ ProxyResourceAutoGenerated,
+ QueryAnalysisValidationResult,
+ QueryExecutionResult,
+ Quota,
+ QuotaList,
+ QuotaName,
+ RegenAuthKeys,
+ ReportableException,
+ Resource,
+ ResourceAutoGenerated,
+ ResourceSku,
+ ResourceSkuCapabilities,
+ ResourceSkuCapacity,
+ ResourceSkuCosts,
+ ResourceSkuRestrictions,
+ ResourceSkusResult,
+ SchemaComparisonValidationResult,
+ SchemaComparisonValidationResultType,
+ SchemaMigrationSetting,
+ SelectedCertificateInput,
+ ServerProperties,
+ ServiceOperation,
+ ServiceOperationDisplay,
+ ServiceOperationList,
+ ServiceSku,
+ ServiceSkuList,
+ SourceLocation,
+ SqlBackupFileInfo,
+ SqlBackupSetInfo,
+ SqlConnectionInfo,
+ SqlConnectionInformation,
+ SqlDbMigrationStatusDetails,
+ SqlDbOfflineConfiguration,
+ SqlFileShare,
+ SqlMigrationListResult,
+ SqlMigrationService,
+ SqlMigrationServiceUpdate,
+ SqlMigrationTaskInput,
+ SqlServerSqlMISyncTaskInput,
+ SsisMigrationInfo,
+ StartMigrationScenarioServerRoleResult,
+ SyncMigrationDatabaseErrorEvent,
+ SystemData,
+ SystemDataAutoGenerated,
+ TargetLocation,
+ TaskList,
+ TrackedResource,
+ TrackedResourceAutoGenerated,
+ UploadOCIDriverTaskInput,
+ UploadOCIDriverTaskOutput,
+ UploadOCIDriverTaskProperties,
+ UserAssignedIdentity,
+ ValidateMigrationInputSqlServerSqlDbSyncTaskProperties,
+ ValidateMigrationInputSqlServerSqlMISyncTaskInput,
+ ValidateMigrationInputSqlServerSqlMISyncTaskOutput,
+ ValidateMigrationInputSqlServerSqlMISyncTaskProperties,
+ ValidateMigrationInputSqlServerSqlMITaskInput,
+ ValidateMigrationInputSqlServerSqlMITaskOutput,
+ ValidateMigrationInputSqlServerSqlMITaskProperties,
+ ValidateMongoDbTaskProperties,
+ ValidateOracleAzureDbForPostgreSqlSyncTaskProperties,
+ ValidateOracleAzureDbPostgreSqlSyncTaskOutput,
+ ValidateSyncMigrationInputSqlServerTaskInput,
+ ValidateSyncMigrationInputSqlServerTaskOutput,
+ ValidationError,
+ WaitStatistics,
+)
+
+from ._data_migration_management_client_enums import ( # type: ignore
+ AuthType,
+ AuthenticationType,
+ BackupFileStatus,
+ BackupMode,
+ BackupType,
+ CommandState,
+ CommandType,
+ CreatedByType,
+ DataMigrationResultCode,
+ DatabaseCompatLevel,
+ DatabaseFileType,
+ DatabaseMigrationStage,
+ DatabaseMigrationState,
+ DatabaseState,
+ ErrorType,
+ LoginMigrationStage,
+ LoginType,
+ ManagedServiceIdentityType,
+ MigrationState,
+ MigrationStatus,
+ MongoDbClusterType,
+ MongoDbErrorType,
+ MongoDbMigrationState,
+ MongoDbProgressResultType,
+ MongoDbReplication,
+ MongoDbShardKeyOrder,
+ MongoMigrationStatus,
+ MySqlTargetPlatformType,
+ NameCheckFailureReason,
+ ObjectType,
+ OperationOrigin,
+ ProjectProvisioningState,
+ ProjectSourcePlatform,
+ ProjectTargetPlatform,
+ ProvisioningState,
+ ReplicateMigrationState,
+ ResourceSkuCapacityScaleType,
+ ResourceSkuRestrictionsReasonCode,
+ ResourceSkuRestrictionsType,
+ ResourceType,
+ ScenarioSource,
+ ScenarioTarget,
+ SchemaMigrationOption,
+ SchemaMigrationStage,
+ ServerLevelPermissionsGroup,
+ ServiceProvisioningState,
+ ServiceScalability,
+ Severity,
+ SqlSourcePlatform,
+ SsisMigrationOverwriteOption,
+ SsisMigrationStage,
+ SsisStoreType,
+ SyncDatabaseMigrationReportingState,
+ SyncTableMigrationState,
+ TaskState,
+ TaskType,
+ UpdateActionType,
+ ValidationStatus,
+)
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -452,8 +488,14 @@
"DatabaseFileInput",
"DatabaseInfo",
"DatabaseMigration",
+ "DatabaseMigrationBase",
+ "DatabaseMigrationBaseListResult",
+ "DatabaseMigrationBaseProperties",
+ "DatabaseMigrationCosmosDbMongo",
+ "DatabaseMigrationCosmosDbMongoListResult",
"DatabaseMigrationListResult",
"DatabaseMigrationProperties",
+ "DatabaseMigrationPropertiesCosmosDbMongo",
"DatabaseMigrationPropertiesSqlDb",
"DatabaseMigrationPropertiesSqlMi",
"DatabaseMigrationPropertiesSqlVm",
@@ -464,7 +506,10 @@
"DatabaseSummaryResult",
"DatabaseTable",
"DeleteNode",
+ "ErrorAdditionalInfo",
+ "ErrorDetail",
"ErrorInfo",
+ "ErrorResponse",
"ExecutionStatistics",
"FileList",
"FileShare",
@@ -492,6 +537,7 @@
"InstallOCIDriverTaskOutput",
"InstallOCIDriverTaskProperties",
"IntegrationRuntimeMonitoringData",
+ "ManagedServiceIdentity",
"MiSqlConnectionInfo",
"MigrateMISyncCompleteCommandInput",
"MigrateMISyncCompleteCommandOutput",
@@ -587,12 +633,16 @@
"MigrationEligibilityInfo",
"MigrationOperationInput",
"MigrationReportResult",
+ "MigrationService",
+ "MigrationServiceListResult",
+ "MigrationServiceUpdate",
"MigrationStatusDetails",
"MigrationTableMetadata",
"MigrationValidationDatabaseLevelResult",
"MigrationValidationDatabaseSummaryResult",
"MigrationValidationOptions",
"MigrationValidationResult",
+ "MongoConnectionInformation",
"MongoDbCancelCommand",
"MongoDbClusterInfo",
"MongoDbCollectionInfo",
@@ -615,6 +665,8 @@
"MongoDbShardKeyInfo",
"MongoDbShardKeySetting",
"MongoDbThrottlingSettings",
+ "MongoMigrationCollection",
+ "MongoMigrationProgressDetails",
"MySqlConnectionInfo",
"NameAvailabilityRequest",
"NameAvailabilityResponse",
@@ -639,6 +691,7 @@
"ProjectTask",
"ProjectTaskProperties",
"ProxyResource",
+ "ProxyResourceAutoGenerated",
"QueryAnalysisValidationResult",
"QueryExecutionResult",
"Quota",
@@ -647,6 +700,7 @@
"RegenAuthKeys",
"ReportableException",
"Resource",
+ "ResourceAutoGenerated",
"ResourceSku",
"ResourceSkuCapabilities",
"ResourceSkuCapacity",
@@ -680,12 +734,15 @@
"StartMigrationScenarioServerRoleResult",
"SyncMigrationDatabaseErrorEvent",
"SystemData",
+ "SystemDataAutoGenerated",
"TargetLocation",
"TaskList",
"TrackedResource",
+ "TrackedResourceAutoGenerated",
"UploadOCIDriverTaskInput",
"UploadOCIDriverTaskOutput",
"UploadOCIDriverTaskProperties",
+ "UserAssignedIdentity",
"ValidateMigrationInputSqlServerSqlDbSyncTaskProperties",
"ValidateMigrationInputSqlServerSqlMISyncTaskInput",
"ValidateMigrationInputSqlServerSqlMISyncTaskOutput",
@@ -700,6 +757,7 @@
"ValidateSyncMigrationInputSqlServerTaskOutput",
"ValidationError",
"WaitStatistics",
+ "AuthType",
"AuthenticationType",
"BackupFileStatus",
"BackupMode",
@@ -716,6 +774,7 @@
"ErrorType",
"LoginMigrationStage",
"LoginType",
+ "ManagedServiceIdentityType",
"MigrationState",
"MigrationStatus",
"MongoDbClusterType",
@@ -724,6 +783,7 @@
"MongoDbProgressResultType",
"MongoDbReplication",
"MongoDbShardKeyOrder",
+ "MongoMigrationStatus",
"MySqlTargetPlatformType",
"NameCheckFailureReason",
"ObjectType",
@@ -731,6 +791,7 @@
"ProjectProvisioningState",
"ProjectSourcePlatform",
"ProjectTargetPlatform",
+ "ProvisioningState",
"ReplicateMigrationState",
"ResourceSkuCapacityScaleType",
"ResourceSkuRestrictionsReasonCode",
@@ -755,5 +816,5 @@
"UpdateActionType",
"ValidationStatus",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_data_migration_management_client_enums.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_data_migration_management_client_enums.py
index 0fa404f7b186..6c0a8ba1181b 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_data_migration_management_client_enums.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_data_migration_management_client_enums.py
@@ -20,6 +20,15 @@ class AuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
ACTIVE_DIRECTORY_PASSWORD = "ActiveDirectoryPassword"
+class AuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Authentication type used for accessing Azure Blob Storage."""
+
+ ACCOUNT_KEY = "AccountKey"
+ """Use an account key for authentication."""
+ MANAGED_IDENTITY = "ManagedIdentity"
+ """Use a managed identity for authentication."""
+
+
class BackupFileStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""An enumeration of Status of the log backup file."""
@@ -72,7 +81,7 @@ class CommandType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
- """CreatedByType."""
+ """The type of identity that created the resource."""
USER = "User"
APPLICATION = "Application"
@@ -187,6 +196,17 @@ class LoginType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
EXTERNAL_GROUP = "ExternalGroup"
+class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of managed service identity (where both SystemAssigned and UserAssigned types are
+ allowed).
+ """
+
+ NONE = "None"
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+
+
class MigrationState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Current state of migration."""
@@ -270,6 +290,16 @@ class MongoDbShardKeyOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta):
HASHED = "Hashed"
+class MongoMigrationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Migration Status."""
+
+ NOT_STARTED = "NotStarted"
+ IN_PROGRESS = "InProgress"
+ COMPLETED = "Completed"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+
+
class MySqlTargetPlatformType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""An enumeration of possible target types when migrating from MySQL."""
@@ -329,6 +359,18 @@ class ProjectTargetPlatform(str, Enum, metaclass=CaseInsensitiveEnumMeta):
UNKNOWN = "Unknown"
+class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Provisioning State of migration. ProvisioningState as Succeeded implies that validations have
+ been performed and migration has started.
+ """
+
+ PROVISIONING = "Provisioning"
+ UPDATING = "Updating"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+
+
class ReplicateMigrationState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Wrapper for replicate reported migration states."""
@@ -367,6 +409,7 @@ class ResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
SQL_MI = "SqlMi"
SQL_VM = "SqlVm"
SQL_DB = "SqlDb"
+ MONGO_TO_COSMOS_DB_MONGO = "MongoToCosmosDbMongo"
class ScenarioSource(str, Enum, metaclass=CaseInsensitiveEnumMeta):
@@ -471,7 +514,7 @@ class SqlSourcePlatform(str, Enum, metaclass=CaseInsensitiveEnumMeta):
class SsisMigrationOverwriteOption(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The overwrite option for SSIS object migration, only ignore and overwrite are supported in DMS
- now and future may add Reuse option for container object.
+ (classic) now and future may add Reuse option for container object.
"""
IGNORE = "Ignore"
@@ -488,7 +531,7 @@ class SsisMigrationStage(str, Enum, metaclass=CaseInsensitiveEnumMeta):
class SsisStoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
- """An enumeration of supported source SSIS store type in DMS."""
+ """An enumeration of supported source SSIS store type in DMS (classic)."""
SSIS_CATALOG = "SsisCatalog"
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_models_py3.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_models_py3.py
index 3a2b38ac41ea..7ada402cf2d4 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_models_py3.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_models_py3.py
@@ -1,5 +1,5 @@
-# coding=utf-8
# pylint: disable=too-many-lines
+# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
@@ -16,10 +16,9 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
@@ -32,7 +31,7 @@ class ApiError(_serialization.Model):
:ivar error: Error information in OData format.
:vartype error: ~azure.mgmt.datamigration.models.ODataError
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
"""
_validation = {
@@ -41,10 +40,10 @@ class ApiError(_serialization.Model):
_attribute_map = {
"error": {"key": "error", "type": "ODataError"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
}
- def __init__(self, *, error: Optional["_models.ODataError"] = None, **kwargs):
+ def __init__(self, *, error: Optional["_models.ODataError"] = None, **kwargs: Any) -> None:
"""
:keyword error: Error information in OData format.
:paramtype error: ~azure.mgmt.datamigration.models.ODataError
@@ -68,7 +67,7 @@ class AuthenticationKeys(_serialization.Model):
"auth_key2": {"key": "authKey2", "type": "str"},
}
- def __init__(self, *, auth_key1: Optional[str] = None, auth_key2: Optional[str] = None, **kwargs):
+ def __init__(self, *, auth_key1: Optional[str] = None, auth_key2: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword auth_key1: The first authentication key.
:paramtype auth_key1: str
@@ -103,8 +102,8 @@ def __init__(
resource_type: Optional[str] = None,
sku: Optional["_models.AvailableServiceSkuSku"] = None,
capacity: Optional["_models.AvailableServiceSkuCapacity"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword resource_type: The resource type, including the provider namespace.
:paramtype resource_type: str
@@ -147,8 +146,8 @@ def __init__(
maximum: Optional[int] = None,
default: Optional[int] = None,
scale_type: Optional[Union[str, "_models.ServiceScalability"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword minimum: The minimum capacity, usually 0 or 1.
:paramtype minimum: int
@@ -194,8 +193,8 @@ def __init__(
family: Optional[str] = None,
size: Optional[str] = None,
tier: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: The name of the SKU.
:paramtype name: str
@@ -240,8 +239,8 @@ def __init__(
app_key: Optional[str] = None,
tenant_id: Optional[str] = None,
ignore_azure_permissions: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword application_id: Application ID of the Azure Active Directory Application.
:paramtype application_id: str
@@ -262,6 +261,11 @@ def __init__(
class AzureBlob(_serialization.Model):
"""Azure Blob Details.
+ :ivar auth_type: Authentication type used for accessing Azure Blob Storage. Known values are:
+ "AccountKey" and "ManagedIdentity".
+ :vartype auth_type: str or ~azure.mgmt.datamigration.models.AuthType
+ :ivar identity: Identity details for authentication using a Managed Identity.
+ :vartype identity: ~azure.mgmt.datamigration.models.ManagedServiceIdentity
:ivar storage_account_resource_id: Resource Id of the storage account where backups are stored.
:vartype storage_account_resource_id: str
:ivar account_key: Storage Account Key.
@@ -271,6 +275,8 @@ class AzureBlob(_serialization.Model):
"""
_attribute_map = {
+ "auth_type": {"key": "authType", "type": "str"},
+ "identity": {"key": "identity", "type": "ManagedServiceIdentity"},
"storage_account_resource_id": {"key": "storageAccountResourceId", "type": "str"},
"account_key": {"key": "accountKey", "type": "str"},
"blob_container_name": {"key": "blobContainerName", "type": "str"},
@@ -279,12 +285,19 @@ class AzureBlob(_serialization.Model):
def __init__(
self,
*,
+ auth_type: Optional[Union[str, "_models.AuthType"]] = None,
+ identity: Optional["_models.ManagedServiceIdentity"] = None,
storage_account_resource_id: Optional[str] = None,
account_key: Optional[str] = None,
blob_container_name: Optional[str] = None,
- **kwargs
- ):
- """
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword auth_type: Authentication type used for accessing Azure Blob Storage. Known values
+ are: "AccountKey" and "ManagedIdentity".
+ :paramtype auth_type: str or ~azure.mgmt.datamigration.models.AuthType
+ :keyword identity: Identity details for authentication using a Managed Identity.
+ :paramtype identity: ~azure.mgmt.datamigration.models.ManagedServiceIdentity
:keyword storage_account_resource_id: Resource Id of the storage account where backups are
stored.
:paramtype storage_account_resource_id: str
@@ -294,6 +307,8 @@ def __init__(
:paramtype blob_container_name: str
"""
super().__init__(**kwargs)
+ self.auth_type = auth_type
+ self.identity = identity
self.storage_account_resource_id = storage_account_resource_id
self.account_key = account_key
self.blob_container_name = blob_container_name
@@ -318,8 +333,8 @@ def __init__(
*,
source_location: Optional["_models.SourceLocation"] = None,
target_location: Optional["_models.TargetLocation"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_location: Source location of backups.
:paramtype source_location: ~azure.mgmt.datamigration.models.SourceLocation
@@ -355,8 +370,8 @@ def __init__(
file_location: Optional[str] = None,
family_sequence_number: Optional[int] = None,
status: Optional[Union[str, "_models.BackupFileStatus"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword file_location: Location of the backup file in shared folder.
:paramtype file_location: str
@@ -425,8 +440,8 @@ def __init__(
backup_start_date: Optional[datetime.datetime] = None,
backup_finished_date: Optional[datetime.datetime] = None,
is_backup_restored: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword backup_set_id: Id for the set of backup files.
:paramtype backup_set_id: str
@@ -475,7 +490,7 @@ class BlobShare(_serialization.Model):
"sas_uri": {"key": "sasUri", "type": "str"},
}
- def __init__(self, *, sas_uri: Optional[str] = None, **kwargs):
+ def __init__(self, *, sas_uri: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword sas_uri: SAS URI of Azure Storage Account Container.
:paramtype sas_uri: str
@@ -495,7 +510,7 @@ class CheckOCIDriverTaskInput(_serialization.Model):
"server_version": {"key": "serverVersion", "type": "str"},
}
- def __init__(self, *, server_version: Optional[str] = None, **kwargs):
+ def __init__(self, *, server_version: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword server_version: Version of the source server to check against. Optional.
:paramtype server_version: str
@@ -524,7 +539,7 @@ class CheckOCIDriverTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, *, installed_driver: Optional["_models.OracleOCIDriverInfo"] = None, **kwargs):
+ def __init__(self, *, installed_driver: Optional["_models.OracleOCIDriverInfo"] = None, **kwargs: Any) -> None:
"""
:keyword installed_driver: Information about the installed driver if found and valid.
:paramtype installed_driver: ~azure.mgmt.datamigration.models.OracleOCIDriverInfo
@@ -535,7 +550,8 @@ def __init__(self, *, installed_driver: Optional["_models.OracleOCIDriverInfo"]
class ProjectTaskProperties(_serialization.Model):
- """Base class for all types of DMS task properties. If task is not supported by current client, this object is returned.
+ """Base class for all types of DMS (classic) task properties. If task is not supported by current
+ client, this object is returned.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
ConnectToMongoDbTaskProperties, ConnectToSourceMySqlTaskProperties,
@@ -564,7 +580,7 @@ class ProjectTaskProperties(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -654,7 +670,7 @@ class ProjectTaskProperties(_serialization.Model):
}
}
- def __init__(self, *, client_data: Optional[Dict[str, str]] = None, **kwargs):
+ def __init__(self, *, client_data: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -672,7 +688,7 @@ class CheckOCIDriverTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -731,8 +747,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.CheckOCIDriverTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -746,7 +762,8 @@ def __init__(
class CommandProperties(_serialization.Model):
- """Base class for all types of DMS command properties. If command is not supported by current client, this object is returned.
+ """Base class for all types of DMS (classic) command properties. If command is not supported by
+ current client, this object is returned.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MigrateMISyncCompleteCommandProperties, MigrateSyncCompleteCommandProperties,
@@ -754,7 +771,7 @@ class CommandProperties(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database",
"Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart".
@@ -788,7 +805,7 @@ class CommandProperties(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.command_type: Optional[str] = None
@@ -803,7 +820,7 @@ class ConnectionInfo(_serialization.Model):
MiSqlConnectionInfo, MongoDbConnectionInfo, MySqlConnectionInfo, OracleConnectionInfo,
PostgreSqlConnectionInfo, SqlConnectionInfo
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: Type of connection info. Required.
:vartype type: str
@@ -834,7 +851,7 @@ class ConnectionInfo(_serialization.Model):
}
}
- def __init__(self, *, user_name: Optional[str] = None, password: Optional[str] = None, **kwargs):
+ def __init__(self, *, user_name: Optional[str] = None, password: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword user_name: User name.
:paramtype user_name: str
@@ -848,11 +865,12 @@ def __init__(self, *, user_name: Optional[str] = None, password: Optional[str] =
class ConnectToMongoDbTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates the connection to and provides information about a MongoDB server.
+ """Properties for the task that validates the connection to and provides information about a
+ MongoDB server.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -911,8 +929,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MongoDbConnectionInfo"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -928,7 +946,7 @@ def __init__(
class ConnectToSourceMySqlTaskInput(_serialization.Model):
"""Input for the task that validates MySQL database connection.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to MySQL source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -962,8 +980,8 @@ def __init__(
target_platform: Optional[Union[str, "_models.MySqlTargetPlatformType"]] = None,
check_permissions_group: Optional[Union[str, "_models.ServerLevelPermissionsGroup"]] = None,
is_offline_migration: bool = False,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to MySQL source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -990,7 +1008,7 @@ class ConnectToSourceMySqlTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -1049,8 +1067,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToSourceMySqlTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -1096,7 +1114,7 @@ class ConnectToSourceNonSqlTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -1109,7 +1127,7 @@ def __init__(self, **kwargs):
class ConnectToSourceOracleSyncTaskInput(_serialization.Model):
"""Input for the task that validates Oracle database connection.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to Oracle source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.OracleConnectionInfo
@@ -1123,7 +1141,7 @@ class ConnectToSourceOracleSyncTaskInput(_serialization.Model):
"source_connection_info": {"key": "sourceConnectionInfo", "type": "OracleConnectionInfo"},
}
- def __init__(self, *, source_connection_info: "_models.OracleConnectionInfo", **kwargs):
+ def __init__(self, *, source_connection_info: "_models.OracleConnectionInfo", **kwargs: Any) -> None:
"""
:keyword source_connection_info: Information for connecting to Oracle source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.OracleConnectionInfo
@@ -1161,7 +1179,7 @@ class ConnectToSourceOracleSyncTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.source_server_version = None
@@ -1175,7 +1193,7 @@ class ConnectToSourceOracleSyncTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -1234,8 +1252,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToSourceOracleSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -1251,7 +1269,7 @@ def __init__(
class ConnectToSourcePostgreSqlSyncTaskInput(_serialization.Model):
"""Input for the task that validates connection to PostgreSQL and source server requirements.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Connection information for source PostgreSQL server. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo
@@ -1265,7 +1283,7 @@ class ConnectToSourcePostgreSqlSyncTaskInput(_serialization.Model):
"source_connection_info": {"key": "sourceConnectionInfo", "type": "PostgreSqlConnectionInfo"},
}
- def __init__(self, *, source_connection_info: "_models.PostgreSqlConnectionInfo", **kwargs):
+ def __init__(self, *, source_connection_info: "_models.PostgreSqlConnectionInfo", **kwargs: Any) -> None:
"""
:keyword source_connection_info: Connection information for source PostgreSQL server. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo
@@ -1307,7 +1325,7 @@ class ConnectToSourcePostgreSqlSyncTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -1317,12 +1335,13 @@ def __init__(self, **kwargs):
self.validation_errors = None
-class ConnectToSourcePostgreSqlSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates connection to PostgreSQL server and source server requirements for online migration.
+class ConnectToSourcePostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that validates connection to PostgreSQL server and source server
+ requirements for online migration.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -1381,8 +1400,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToSourcePostgreSqlSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -1395,12 +1414,13 @@ def __init__(
self.output = None
-class ConnectToSourceSqlServerSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates connection to SQL Server and source server requirements for online migration.
+class ConnectToSourceSqlServerSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that validates connection to SQL Server and source server requirements
+ for online migration.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -1459,8 +1479,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToSourceSqlServerTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -1474,9 +1494,10 @@ def __init__(
class ConnectToSourceSqlServerTaskInput(_serialization.Model):
- """Input for the task that validates connection to SQL Server and also validates source server requirements.
+ """Input for the task that validates connection to SQL Server and also validates source server
+ requirements.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Connection information for Source SQL Server. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -1527,8 +1548,8 @@ def __init__(
collect_tde_certificate_info: bool = False,
validate_ssis_catalog_only: bool = False,
encrypted_key_for_secure_fields: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Connection information for Source SQL Server. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -1564,7 +1585,8 @@ def __init__(
class ConnectToSourceSqlServerTaskOutput(_serialization.Model):
- """Output for the task that validates connection to SQL Server and also validates source server requirements.
+ """Output for the task that validates connection to SQL Server and also validates source server
+ requirements.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
ConnectToSourceSqlServerTaskOutputAgentJobLevel,
@@ -1573,7 +1595,7 @@ class ConnectToSourceSqlServerTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -1600,19 +1622,22 @@ class ConnectToSourceSqlServerTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.result_type: Optional[str] = None
-class ConnectToSourceSqlServerTaskOutputAgentJobLevel(ConnectToSourceSqlServerTaskOutput):
- """Agent Job level output for the task that validates connection to SQL Server and also validates source server requirements.
+class ConnectToSourceSqlServerTaskOutputAgentJobLevel(
+ ConnectToSourceSqlServerTaskOutput
+): # pylint: disable=name-too-long
+ """Agent Job level output for the task that validates connection to SQL Server and also validates
+ source server requirements.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -1658,7 +1683,7 @@ class ConnectToSourceSqlServerTaskOutputAgentJobLevel(ConnectToSourceSqlServerTa
"migration_eligibility": {"key": "migrationEligibility", "type": "MigrationEligibilityInfo"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "AgentJobLevelOutput"
@@ -1671,12 +1696,15 @@ def __init__(self, **kwargs):
self.migration_eligibility = None
-class ConnectToSourceSqlServerTaskOutputDatabaseLevel(ConnectToSourceSqlServerTaskOutput):
- """Database level output for the task that validates connection to SQL Server and also validates source server requirements.
+class ConnectToSourceSqlServerTaskOutputDatabaseLevel(
+ ConnectToSourceSqlServerTaskOutput
+): # pylint: disable=name-too-long
+ """Database level output for the task that validates connection to SQL Server and also validates
+ source server requirements.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -1718,7 +1746,7 @@ class ConnectToSourceSqlServerTaskOutputDatabaseLevel(ConnectToSourceSqlServerTa
"database_state": {"key": "databaseState", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -1729,12 +1757,13 @@ def __init__(self, **kwargs):
self.database_state = None
-class ConnectToSourceSqlServerTaskOutputLoginLevel(ConnectToSourceSqlServerTaskOutput):
- """Login level output for the task that validates connection to SQL Server and also validates source server requirements.
+class ConnectToSourceSqlServerTaskOutputLoginLevel(ConnectToSourceSqlServerTaskOutput): # pylint: disable=name-too-long
+ """Login level output for the task that validates connection to SQL Server and also validates
+ source server requirements.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -1773,7 +1802,7 @@ class ConnectToSourceSqlServerTaskOutputLoginLevel(ConnectToSourceSqlServerTaskO
"migration_eligibility": {"key": "migrationEligibility", "type": "MigrationEligibilityInfo"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "LoginLevelOutput"
@@ -1784,12 +1813,13 @@ def __init__(self, **kwargs):
self.migration_eligibility = None
-class ConnectToSourceSqlServerTaskOutputTaskLevel(ConnectToSourceSqlServerTaskOutput):
- """Task level output for the task that validates connection to SQL Server and also validates source server requirements.
+class ConnectToSourceSqlServerTaskOutputTaskLevel(ConnectToSourceSqlServerTaskOutput): # pylint: disable=name-too-long
+ """Task level output for the task that validates connection to SQL Server and also validates
+ source server requirements.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -1836,7 +1866,7 @@ class ConnectToSourceSqlServerTaskOutputTaskLevel(ConnectToSourceSqlServerTaskOu
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "TaskLevelOutput"
@@ -1850,11 +1880,12 @@ def __init__(self, **kwargs):
class ConnectToSourceSqlServerTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates connection to SQL Server and also validates source server requirements.
+ """Properties for the task that validates connection to SQL Server and also validates source
+ server requirements.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -1917,8 +1948,8 @@ def __init__(
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToSourceSqlServerTaskInput"] = None,
task_id: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -1935,9 +1966,10 @@ def __init__(
class ConnectToTargetAzureDbForMySqlTaskInput(_serialization.Model):
- """Input for the task that validates connection to Azure Database for MySQL and target server requirements.
+ """Input for the task that validates connection to Azure Database for MySQL and target server
+ requirements.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Connection information for source MySQL server. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -1965,8 +1997,8 @@ def __init__(
source_connection_info: "_models.MySqlConnectionInfo",
target_connection_info: "_models.MySqlConnectionInfo",
is_offline_migration: bool = False,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Connection information for source MySQL server. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -1983,7 +2015,8 @@ def __init__(
class ConnectToTargetAzureDbForMySqlTaskOutput(_serialization.Model):
- """Output for the task that validates connection to Azure Database for MySQL and target server requirements.
+ """Output for the task that validates connection to Azure Database for MySQL and target server
+ requirements.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2015,7 +2048,7 @@ class ConnectToTargetAzureDbForMySqlTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -2025,12 +2058,13 @@ def __init__(self, **kwargs):
self.validation_errors = None
-class ConnectToTargetAzureDbForMySqlTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates connection to Azure Database for MySQL and target server requirements.
+class ConnectToTargetAzureDbForMySqlTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that validates connection to Azure Database for MySQL and target server
+ requirements.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -2090,8 +2124,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToTargetAzureDbForMySqlTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -2104,10 +2138,11 @@ def __init__(
self.output = None
-class ConnectToTargetAzureDbForPostgreSqlSyncTaskInput(_serialization.Model):
- """Input for the task that validates connection to Azure Database for PostgreSQL and target server requirements.
+class ConnectToTargetAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): # pylint: disable=name-too-long
+ """Input for the task that validates connection to Azure Database for PostgreSQL and target server
+ requirements.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Connection information for source PostgreSQL server. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo
@@ -2131,8 +2166,8 @@ def __init__(
*,
source_connection_info: "_models.PostgreSqlConnectionInfo",
target_connection_info: "_models.PostgreSqlConnectionInfo",
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Connection information for source PostgreSQL server. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo
@@ -2145,8 +2180,9 @@ def __init__(
self.target_connection_info = target_connection_info
-class ConnectToTargetAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model):
- """Output for the task that validates connection to Azure Database for PostgreSQL and target server requirements.
+class ConnectToTargetAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for the task that validates connection to Azure Database for PostgreSQL and target
+ server requirements.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2178,7 +2214,7 @@ class ConnectToTargetAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -2188,12 +2224,13 @@ def __init__(self, **kwargs):
self.validation_errors = None
-class ConnectToTargetAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates connection to Azure Database For PostgreSQL server and target server requirements for online migration.
+class ConnectToTargetAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that validates connection to Azure Database For PostgreSQL server and
+ target server requirements for online migration.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -2254,8 +2291,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToTargetAzureDbForPostgreSqlSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -2269,10 +2306,11 @@ def __init__(
self.output = None
-class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput(_serialization.Model):
- """Input for the task that validates connection to Azure Database for PostgreSQL and target server requirements for Oracle source.
+class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): # pylint: disable=name-too-long
+ """Input for the task that validates connection to Azure Database for PostgreSQL and target server
+ requirements for Oracle source.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar target_connection_info: Connection information for target Azure Database for PostgreSQL
server. Required.
@@ -2287,7 +2325,7 @@ class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput(_serialization.Mode
"target_connection_info": {"key": "targetConnectionInfo", "type": "PostgreSqlConnectionInfo"},
}
- def __init__(self, *, target_connection_info: "_models.PostgreSqlConnectionInfo", **kwargs):
+ def __init__(self, *, target_connection_info: "_models.PostgreSqlConnectionInfo", **kwargs: Any) -> None:
"""
:keyword target_connection_info: Connection information for target Azure Database for
PostgreSQL server. Required.
@@ -2297,8 +2335,9 @@ def __init__(self, *, target_connection_info: "_models.PostgreSqlConnectionInfo"
self.target_connection_info = target_connection_info
-class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model):
- """Output for the task that validates connection to Azure Database for PostgreSQL and target server requirements for Oracle source.
+class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for the task that validates connection to Azure Database for PostgreSQL and target
+ server requirements for Oracle source.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2339,12 +2378,12 @@ def __init__(
database_schema_map: Optional[
List["_models.ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem"]
] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword database_schema_map: Mapping of schemas per database.
:paramtype database_schema_map:
- list[~azure.mgmt.datamigration.models.ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem]
+ list[~azure.mgmt.datamigration.models.ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem] # pylint: disable=line-too-long
"""
super().__init__(**kwargs)
self.target_server_version = None
@@ -2354,7 +2393,9 @@ def __init__(
self.database_schema_map = database_schema_map
-class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem(_serialization.Model):
+class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem(
+ _serialization.Model
+): # pylint: disable=name-too-long
"""ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem.
:ivar database:
@@ -2368,7 +2409,7 @@ class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapIt
"schemas": {"key": "schemas", "type": "[str]"},
}
- def __init__(self, *, database: Optional[str] = None, schemas: Optional[List[str]] = None, **kwargs):
+ def __init__(self, *, database: Optional[str] = None, schemas: Optional[List[str]] = None, **kwargs: Any) -> None:
"""
:keyword database:
:paramtype database: str
@@ -2380,12 +2421,15 @@ def __init__(self, *, database: Optional[str] = None, schemas: Optional[List[str
self.schemas = schemas
-class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates connection to Azure Database For PostgreSQL server and target server requirements for online migration for Oracle source.
+class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskProperties(
+ ProjectTaskProperties
+): # pylint: disable=name-too-long
+ """Properties for the task that validates connection to Azure Database For PostgreSQL server and
+ target server requirements for online migration for Oracle source.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -2446,8 +2490,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -2464,7 +2508,7 @@ def __init__(
class ConnectToTargetSqlDbSyncTaskInput(_serialization.Model):
"""Input for the task that validates connection to Azure SQL DB and target server requirements.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Connection information for source SQL Server. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -2487,8 +2531,8 @@ def __init__(
*,
source_connection_info: "_models.SqlConnectionInfo",
target_connection_info: "_models.SqlConnectionInfo",
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Connection information for source SQL Server. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -2501,11 +2545,12 @@ def __init__(
class ConnectToTargetSqlDbSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates connection to SQL DB and target server requirements for online migration.
+ """Properties for the task that validates connection to SQL DB and target server requirements for
+ online migration.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -2564,8 +2609,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToTargetSqlDbSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -2581,7 +2626,7 @@ def __init__(
class ConnectToTargetSqlDbTaskInput(_serialization.Model):
"""Input for the task that validates connection to SQL DB and target server requirements.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar target_connection_info: Connection information for target SQL DB. Required.
:vartype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -2604,8 +2649,8 @@ def __init__(
*,
target_connection_info: "_models.SqlConnectionInfo",
query_object_counts: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword target_connection_info: Connection information for target SQL DB. Required.
:paramtype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -2647,7 +2692,7 @@ class ConnectToTargetSqlDbTaskOutput(_serialization.Model):
"target_server_brand_version": {"key": "targetServerBrandVersion", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -2661,7 +2706,7 @@ class ConnectToTargetSqlDbTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -2724,8 +2769,8 @@ def __init__(
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToTargetSqlDbTaskInput"] = None,
created_on: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -2742,15 +2787,17 @@ def __init__(
class ConnectToTargetSqlMISyncTaskInput(_serialization.Model):
- """Input for the task that validates connection to Azure SQL Database Managed Instance online scenario.
+ """Input for the task that validates connection to Azure SQL Database Managed Instance online
+ scenario.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar target_connection_info: Connection information for Azure SQL Database Managed Instance.
Required.
:vartype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo
- :ivar azure_app: Azure Active Directory Application the DMS instance will use to connect to the
- target instance of Azure SQL Database Managed Instance and the Azure Storage Account. Required.
+ :ivar azure_app: Azure Active Directory Application the DMS (classic) instance will use to
+ connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage
+ Account. Required.
:vartype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp
"""
@@ -2769,15 +2816,15 @@ def __init__(
*,
target_connection_info: "_models.MiSqlConnectionInfo",
azure_app: "_models.AzureActiveDirectoryApp",
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword target_connection_info: Connection information for Azure SQL Database Managed
Instance. Required.
:paramtype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo
- :keyword azure_app: Azure Active Directory Application the DMS instance will use to connect to
- the target instance of Azure SQL Database Managed Instance and the Azure Storage Account.
- Required.
+ :keyword azure_app: Azure Active Directory Application the DMS (classic) instance will use to
+ connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage
+ Account. Required.
:paramtype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp
"""
super().__init__(**kwargs)
@@ -2810,7 +2857,7 @@ class ConnectToTargetSqlMISyncTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.target_server_version = None
@@ -2823,7 +2870,7 @@ class ConnectToTargetSqlMISyncTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -2882,8 +2929,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToTargetSqlMISyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -2899,7 +2946,7 @@ def __init__(
class ConnectToTargetSqlMITaskInput(_serialization.Model):
"""Input for the task that validates connection to Azure SQL Database Managed Instance.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar target_connection_info: Connection information for target SQL Server. Required.
:vartype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -2930,8 +2977,8 @@ def __init__(
collect_logins: bool = True,
collect_agent_jobs: bool = True,
validate_ssis_catalog_only: bool = False,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword target_connection_info: Connection information for target SQL Server. Required.
:paramtype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -2987,7 +3034,7 @@ class ConnectToTargetSqlMITaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -3003,7 +3050,7 @@ class ConnectToTargetSqlMITaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -3062,8 +3109,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ConnectToTargetSqlMITaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -3076,7 +3123,7 @@ def __init__(
self.output = None
-class CopyProgressDetails(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class CopyProgressDetails(_serialization.Model):
"""Details on progress of ADF copy activity.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -3133,7 +3180,7 @@ class CopyProgressDetails(_serialization.Model): # pylint: disable=too-many-ins
"copy_duration": {"key": "copyDuration", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.table_name = None
@@ -3149,7 +3196,7 @@ def __init__(self, **kwargs):
self.copy_duration = None
-class Database(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class Database(_serialization.Model):
"""Information about a single database.
:ivar id: Unique identifier for the database.
@@ -3233,8 +3280,8 @@ def __init__(
server_visible_online_core_count: Optional[int] = None,
database_state: Optional[Union[str, "_models.DatabaseState"]] = None,
server_id: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword id: Unique identifier for the database.
:paramtype id: str
@@ -3343,7 +3390,7 @@ class DatabaseBackupInfo(_serialization.Model):
"backup_finish_date": {"key": "backupFinishDate", "type": "iso-8601"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.database_name = None
@@ -3396,8 +3443,8 @@ def __init__(
restore_full_name: Optional[str] = None,
file_type: Optional[Union[str, "_models.DatabaseFileType"]] = None,
size_mb: Optional[float] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword database_name: Name of the database.
:paramtype database_name: str
@@ -3457,8 +3504,8 @@ def __init__(
physical_full_name: Optional[str] = None,
restore_full_name: Optional[str] = None,
file_type: Optional[Union[str, "_models.DatabaseFileType"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword id: Unique identifier for database file.
:paramtype id: str
@@ -3483,7 +3530,7 @@ def __init__(
class DatabaseInfo(_serialization.Model):
"""Project Database Details.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_database_name: Name of the database. Required.
:vartype source_database_name: str
@@ -3497,7 +3544,7 @@ class DatabaseInfo(_serialization.Model):
"source_database_name": {"key": "sourceDatabaseName", "type": "str"},
}
- def __init__(self, *, source_database_name: str, **kwargs):
+ def __init__(self, *, source_database_name: str, **kwargs: Any) -> None:
"""
:keyword source_database_name: Name of the database. Required.
:paramtype source_database_name: str
@@ -3506,37 +3553,65 @@ def __init__(self, *, source_database_name: str, **kwargs):
self.source_database_name = source_database_name
-class ProxyResource(_serialization.Model):
- """ProxyResource.
+class Resource(_serialization.Model):
+ """Common fields that are returned in the response for all Azure Resource Manager resources.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id:
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
- :ivar name:
+ :ivar name: The name of the resource.
:vartype name: str
- :ivar type:
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
:vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
+ "system_data": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.name = None
self.type = None
+ self.system_data = None
+
+
+class ProxyResource(Resource):
+ """The resource model definition for a Azure Resource Manager proxy resource. It will not have
+ tags and a location.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ """
class DatabaseMigration(ProxyResource):
@@ -3544,13 +3619,16 @@ class DatabaseMigration(ProxyResource):
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id:
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
- :ivar name:
+ :ivar name: The name of the resource.
:vartype name: str
- :ivar type:
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
:vartype type: str
- :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
:vartype system_data: ~azure.mgmt.datamigration.models.SystemData
:ivar properties: Database Migration Resource properties.
:vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationProperties
@@ -3571,23 +3649,68 @@ class DatabaseMigration(ProxyResource):
"properties": {"key": "properties", "type": "DatabaseMigrationProperties"},
}
- def __init__(self, *, properties: Optional["_models.DatabaseMigrationProperties"] = None, **kwargs):
+ def __init__(self, *, properties: Optional["_models.DatabaseMigrationProperties"] = None, **kwargs: Any) -> None:
"""
:keyword properties: Database Migration Resource properties.
:paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationProperties
"""
super().__init__(**kwargs)
- self.system_data = None
self.properties = properties
-class DatabaseMigrationListResult(_serialization.Model):
+class DatabaseMigrationBase(ProxyResource):
+ """Database Migration Resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :ivar properties: Database Migration Base Resource properties.
+ :vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationBaseProperties
+ """
+
+ _validation = {
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
+ "properties": {"key": "properties", "type": "DatabaseMigrationBaseProperties"},
+ }
+
+ def __init__(
+ self, *, properties: Optional["_models.DatabaseMigrationBaseProperties"] = None, **kwargs: Any
+ ) -> None:
+ """
+ :keyword properties: Database Migration Base Resource properties.
+ :paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationBaseProperties
+ """
+ super().__init__(**kwargs)
+ self.properties = properties
+
+
+class DatabaseMigrationBaseListResult(_serialization.Model):
"""A list of Database Migrations.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value:
- :vartype value: list[~azure.mgmt.datamigration.models.DatabaseMigration]
+ :vartype value: list[~azure.mgmt.datamigration.models.DatabaseMigrationBase]
:ivar next_link:
:vartype next_link: str
"""
@@ -3598,55 +3721,47 @@ class DatabaseMigrationListResult(_serialization.Model):
}
_attribute_map = {
- "value": {"key": "value", "type": "[DatabaseMigration]"},
+ "value": {"key": "value", "type": "[DatabaseMigrationBase]"},
"next_link": {"key": "nextLink", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.value = None
self.next_link = None
-class DatabaseMigrationProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes
- """Database Migration Resource properties.
+class DatabaseMigrationBaseProperties(_serialization.Model):
+ """Database Migration Base Resource properties.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
- DatabaseMigrationPropertiesSqlDb, DatabaseMigrationPropertiesSqlMi,
- DatabaseMigrationPropertiesSqlVm
+ DatabaseMigrationProperties, DatabaseMigrationPropertiesCosmosDbMongo
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
- :ivar kind: Required. Known values are: "SqlMi", "SqlVm", and "SqlDb".
+ :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo".
:vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType
- :ivar scope: Resource Id of the target resource (SQL VM or SQL Managed Instance).
+ :ivar scope: Resource Id of the target resource.
:vartype scope: str
:ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded
- implies that validations have been performed and migration has started.
- :vartype provisioning_state: str
+ implies that validations have been performed and migration has started. Known values are:
+ "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled".
+ :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState
:ivar migration_status: Migration status.
:vartype migration_status: str
:ivar started_on: Database migration start time.
:vartype started_on: ~datetime.datetime
:ivar ended_on: Database migration end time.
:vartype ended_on: ~datetime.datetime
- :ivar source_sql_connection: Source SQL Server connection details.
- :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
- :ivar source_database_name: Name of the source database.
- :vartype source_database_name: str
- :ivar source_server_name: Name of the source sql server.
- :vartype source_server_name: str
:ivar migration_service: Resource Id of the Migration Service.
:vartype migration_service: str
- :ivar migration_operation_id: ID tracking current migration operation.
+ :ivar migration_operation_id: ID for current migration operation.
:vartype migration_operation_id: str
:ivar migration_failure_error: Error details in case of migration failure.
:vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo
- :ivar target_database_collation: Database collation to be used for the target database.
- :vartype target_database_collation: str
:ivar provisioning_error: Error message for migration provisioning failure, if any.
:vartype provisioning_error: str
"""
@@ -3657,7 +3772,6 @@ class DatabaseMigrationProperties(_serialization.Model): # pylint: disable=too-
"migration_status": {"readonly": True},
"started_on": {"readonly": True},
"ended_on": {"readonly": True},
- "source_server_name": {"readonly": True},
"migration_failure_error": {"readonly": True},
}
@@ -3668,21 +3782,16 @@ class DatabaseMigrationProperties(_serialization.Model): # pylint: disable=too-
"migration_status": {"key": "migrationStatus", "type": "str"},
"started_on": {"key": "startedOn", "type": "iso-8601"},
"ended_on": {"key": "endedOn", "type": "iso-8601"},
- "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"},
- "source_database_name": {"key": "sourceDatabaseName", "type": "str"},
- "source_server_name": {"key": "sourceServerName", "type": "str"},
"migration_service": {"key": "migrationService", "type": "str"},
"migration_operation_id": {"key": "migrationOperationId", "type": "str"},
"migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"},
- "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"},
"provisioning_error": {"key": "provisioningError", "type": "str"},
}
_subtype_map = {
"kind": {
- "SqlDb": "DatabaseMigrationPropertiesSqlDb",
- "SqlMi": "DatabaseMigrationPropertiesSqlMi",
- "SqlVm": "DatabaseMigrationPropertiesSqlVm",
+ "DatabaseMigrationProperties": "DatabaseMigrationProperties",
+ "MongoToCosmosDbMongo": "DatabaseMigrationPropertiesCosmosDbMongo",
}
}
@@ -3690,27 +3799,18 @@ def __init__(
self,
*,
scope: Optional[str] = None,
- source_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
- source_database_name: Optional[str] = None,
migration_service: Optional[str] = None,
migration_operation_id: Optional[str] = None,
- target_database_collation: Optional[str] = None,
provisioning_error: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
- :keyword scope: Resource Id of the target resource (SQL VM or SQL Managed Instance).
+ :keyword scope: Resource Id of the target resource.
:paramtype scope: str
- :keyword source_sql_connection: Source SQL Server connection details.
- :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
- :keyword source_database_name: Name of the source database.
- :paramtype source_database_name: str
:keyword migration_service: Resource Id of the Migration Service.
:paramtype migration_service: str
- :keyword migration_operation_id: ID tracking current migration operation.
+ :keyword migration_operation_id: ID for current migration operation.
:paramtype migration_operation_id: str
- :keyword target_database_collation: Database collation to be used for the target database.
- :paramtype target_database_collation: str
:keyword provisioning_error: Error message for migration provisioning failure, if any.
:paramtype provisioning_error: str
"""
@@ -3721,182 +3821,581 @@ def __init__(
self.migration_status = None
self.started_on = None
self.ended_on = None
- self.source_sql_connection = source_sql_connection
- self.source_database_name = source_database_name
- self.source_server_name = None
self.migration_service = migration_service
self.migration_operation_id = migration_operation_id
self.migration_failure_error = None
- self.target_database_collation = target_database_collation
self.provisioning_error = provisioning_error
-class DatabaseMigrationPropertiesSqlDb(DatabaseMigrationProperties): # pylint: disable=too-many-instance-attributes
- """Database Migration Resource properties for SQL database.
+class DatabaseMigrationCosmosDbMongo(ProxyResource):
+ """Database Migration Resource for Mongo to CosmosDb.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
-
- :ivar kind: Required. Known values are: "SqlMi", "SqlVm", and "SqlDb".
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :ivar kind: Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo".
:vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType
- :ivar scope: Resource Id of the target resource (SQL VM or SQL Managed Instance).
+ :ivar scope: Resource Id of the target resource.
:vartype scope: str
:ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded
- implies that validations have been performed and migration has started.
- :vartype provisioning_state: str
+ implies that validations have been performed and migration has started. Known values are:
+ "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled".
+ :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState
:ivar migration_status: Migration status.
:vartype migration_status: str
:ivar started_on: Database migration start time.
:vartype started_on: ~datetime.datetime
:ivar ended_on: Database migration end time.
:vartype ended_on: ~datetime.datetime
- :ivar source_sql_connection: Source SQL Server connection details.
- :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
- :ivar source_database_name: Name of the source database.
- :vartype source_database_name: str
- :ivar source_server_name: Name of the source sql server.
- :vartype source_server_name: str
:ivar migration_service: Resource Id of the Migration Service.
:vartype migration_service: str
- :ivar migration_operation_id: ID tracking current migration operation.
+ :ivar migration_operation_id: ID for current migration operation.
:vartype migration_operation_id: str
:ivar migration_failure_error: Error details in case of migration failure.
:vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo
- :ivar target_database_collation: Database collation to be used for the target database.
- :vartype target_database_collation: str
:ivar provisioning_error: Error message for migration provisioning failure, if any.
:vartype provisioning_error: str
- :ivar migration_status_details: Detailed migration status. Not included by default.
- :vartype migration_status_details: ~azure.mgmt.datamigration.models.SqlDbMigrationStatusDetails
- :ivar target_sql_connection: Target SQL DB connection details.
- :vartype target_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
- :ivar offline_configuration: Offline configuration.
- :vartype offline_configuration: ~azure.mgmt.datamigration.models.SqlDbOfflineConfiguration
- :ivar table_list: List of tables to copy.
- :vartype table_list: list[str]
+ :ivar source_mongo_connection: Source Mongo connection details.
+ :vartype source_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation
+ :ivar target_mongo_connection: Target Cosmos DB Mongo connection details.
+ :vartype target_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation
+ :ivar collection_list: List of Mongo Collections to be migrated.
+ :vartype collection_list: list[~azure.mgmt.datamigration.models.MongoMigrationCollection]
"""
_validation = {
- "kind": {"required": True},
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
"provisioning_state": {"readonly": True},
"migration_status": {"readonly": True},
"started_on": {"readonly": True},
"ended_on": {"readonly": True},
- "source_server_name": {"readonly": True},
"migration_failure_error": {"readonly": True},
- "migration_status_details": {"readonly": True},
- "offline_configuration": {"readonly": True},
}
_attribute_map = {
- "kind": {"key": "kind", "type": "str"},
- "scope": {"key": "scope", "type": "str"},
- "provisioning_state": {"key": "provisioningState", "type": "str"},
- "migration_status": {"key": "migrationStatus", "type": "str"},
- "started_on": {"key": "startedOn", "type": "iso-8601"},
- "ended_on": {"key": "endedOn", "type": "iso-8601"},
- "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"},
- "source_database_name": {"key": "sourceDatabaseName", "type": "str"},
- "source_server_name": {"key": "sourceServerName", "type": "str"},
- "migration_service": {"key": "migrationService", "type": "str"},
- "migration_operation_id": {"key": "migrationOperationId", "type": "str"},
- "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"},
- "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"},
- "provisioning_error": {"key": "provisioningError", "type": "str"},
- "migration_status_details": {"key": "migrationStatusDetails", "type": "SqlDbMigrationStatusDetails"},
- "target_sql_connection": {"key": "targetSqlConnection", "type": "SqlConnectionInformation"},
- "offline_configuration": {"key": "offlineConfiguration", "type": "SqlDbOfflineConfiguration"},
- "table_list": {"key": "tableList", "type": "[str]"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
+ "kind": {"key": "properties.kind", "type": "str"},
+ "scope": {"key": "properties.scope", "type": "str"},
+ "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
+ "migration_status": {"key": "properties.migrationStatus", "type": "str"},
+ "started_on": {"key": "properties.startedOn", "type": "iso-8601"},
+ "ended_on": {"key": "properties.endedOn", "type": "iso-8601"},
+ "migration_service": {"key": "properties.migrationService", "type": "str"},
+ "migration_operation_id": {"key": "properties.migrationOperationId", "type": "str"},
+ "migration_failure_error": {"key": "properties.migrationFailureError", "type": "ErrorInfo"},
+ "provisioning_error": {"key": "properties.provisioningError", "type": "str"},
+ "source_mongo_connection": {"key": "properties.sourceMongoConnection", "type": "MongoConnectionInformation"},
+ "target_mongo_connection": {"key": "properties.targetMongoConnection", "type": "MongoConnectionInformation"},
+ "collection_list": {"key": "properties.collectionList", "type": "[MongoMigrationCollection]"},
}
def __init__(
self,
*,
scope: Optional[str] = None,
- source_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
- source_database_name: Optional[str] = None,
migration_service: Optional[str] = None,
migration_operation_id: Optional[str] = None,
- target_database_collation: Optional[str] = None,
provisioning_error: Optional[str] = None,
- target_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
- table_list: Optional[List[str]] = None,
- **kwargs
- ):
+ source_mongo_connection: Optional["_models.MongoConnectionInformation"] = None,
+ target_mongo_connection: Optional["_models.MongoConnectionInformation"] = None,
+ collection_list: Optional[List["_models.MongoMigrationCollection"]] = None,
+ **kwargs: Any
+ ) -> None:
"""
- :keyword scope: Resource Id of the target resource (SQL VM or SQL Managed Instance).
+ :keyword scope: Resource Id of the target resource.
:paramtype scope: str
- :keyword source_sql_connection: Source SQL Server connection details.
- :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
- :keyword source_database_name: Name of the source database.
- :paramtype source_database_name: str
:keyword migration_service: Resource Id of the Migration Service.
:paramtype migration_service: str
- :keyword migration_operation_id: ID tracking current migration operation.
+ :keyword migration_operation_id: ID for current migration operation.
:paramtype migration_operation_id: str
- :keyword target_database_collation: Database collation to be used for the target database.
- :paramtype target_database_collation: str
:keyword provisioning_error: Error message for migration provisioning failure, if any.
:paramtype provisioning_error: str
- :keyword target_sql_connection: Target SQL DB connection details.
- :paramtype target_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
- :keyword table_list: List of tables to copy.
- :paramtype table_list: list[str]
+ :keyword source_mongo_connection: Source Mongo connection details.
+ :paramtype source_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation
+ :keyword target_mongo_connection: Target Cosmos DB Mongo connection details.
+ :paramtype target_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation
+ :keyword collection_list: List of Mongo Collections to be migrated.
+ :paramtype collection_list: list[~azure.mgmt.datamigration.models.MongoMigrationCollection]
"""
- super().__init__(
- scope=scope,
- source_sql_connection=source_sql_connection,
- source_database_name=source_database_name,
- migration_service=migration_service,
- migration_operation_id=migration_operation_id,
- target_database_collation=target_database_collation,
- provisioning_error=provisioning_error,
- **kwargs
- )
- self.kind: str = "SqlDb"
- self.migration_status_details = None
- self.target_sql_connection = target_sql_connection
- self.offline_configuration = None
- self.table_list = table_list
+ super().__init__(**kwargs)
+ self.kind: Optional[str] = None
+ self.scope = scope
+ self.provisioning_state = None
+ self.migration_status = None
+ self.started_on = None
+ self.ended_on = None
+ self.migration_service = migration_service
+ self.migration_operation_id = migration_operation_id
+ self.migration_failure_error = None
+ self.provisioning_error = provisioning_error
+ self.source_mongo_connection = source_mongo_connection
+ self.target_mongo_connection = target_mongo_connection
+ self.collection_list = collection_list
-class DatabaseMigrationPropertiesSqlMi(DatabaseMigrationProperties): # pylint: disable=too-many-instance-attributes
- """Database Migration Resource properties for SQL Managed Instance.
+class DatabaseMigrationCosmosDbMongoListResult(_serialization.Model):
+ """A list of Database Migrations.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value:
+ :vartype value: list[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :ivar next_link:
+ :vartype next_link: str
+ """
+
+ _validation = {
+ "value": {"readonly": True},
+ "next_link": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "value": {"key": "value", "type": "[DatabaseMigrationCosmosDbMongo]"},
+ "next_link": {"key": "nextLink", "type": "str"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class DatabaseMigrationListResult(_serialization.Model):
+ """A list of Database Migrations.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value:
+ :vartype value: list[~azure.mgmt.datamigration.models.DatabaseMigration]
+ :ivar next_link:
+ :vartype next_link: str
+ """
+
+ _validation = {
+ "value": {"readonly": True},
+ "next_link": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "value": {"key": "value", "type": "[DatabaseMigration]"},
+ "next_link": {"key": "nextLink", "type": "str"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class DatabaseMigrationProperties(DatabaseMigrationBaseProperties):
+ """Database Migration Resource properties.
+
+ You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+ DatabaseMigrationPropertiesSqlDb, DatabaseMigrationPropertiesSqlMi,
+ DatabaseMigrationPropertiesSqlVm
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
- :ivar kind: Required. Known values are: "SqlMi", "SqlVm", and "SqlDb".
+ :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo".
:vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType
- :ivar scope: Resource Id of the target resource (SQL VM or SQL Managed Instance).
+ :ivar scope: Resource Id of the target resource.
:vartype scope: str
:ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded
- implies that validations have been performed and migration has started.
- :vartype provisioning_state: str
+ implies that validations have been performed and migration has started. Known values are:
+ "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled".
+ :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState
:ivar migration_status: Migration status.
:vartype migration_status: str
:ivar started_on: Database migration start time.
:vartype started_on: ~datetime.datetime
:ivar ended_on: Database migration end time.
:vartype ended_on: ~datetime.datetime
+ :ivar migration_service: Resource Id of the Migration Service.
+ :vartype migration_service: str
+ :ivar migration_operation_id: ID for current migration operation.
+ :vartype migration_operation_id: str
+ :ivar migration_failure_error: Error details in case of migration failure.
+ :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo
+ :ivar provisioning_error: Error message for migration provisioning failure, if any.
+ :vartype provisioning_error: str
:ivar source_sql_connection: Source SQL Server connection details.
:vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
:ivar source_database_name: Name of the source database.
:vartype source_database_name: str
:ivar source_server_name: Name of the source sql server.
:vartype source_server_name: str
+ :ivar target_database_collation: Database collation to be used for the target database.
+ :vartype target_database_collation: str
+ """
+
+ _validation = {
+ "kind": {"required": True},
+ "provisioning_state": {"readonly": True},
+ "migration_status": {"readonly": True},
+ "started_on": {"readonly": True},
+ "ended_on": {"readonly": True},
+ "migration_failure_error": {"readonly": True},
+ "source_server_name": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "kind": {"key": "kind", "type": "str"},
+ "scope": {"key": "scope", "type": "str"},
+ "provisioning_state": {"key": "provisioningState", "type": "str"},
+ "migration_status": {"key": "migrationStatus", "type": "str"},
+ "started_on": {"key": "startedOn", "type": "iso-8601"},
+ "ended_on": {"key": "endedOn", "type": "iso-8601"},
+ "migration_service": {"key": "migrationService", "type": "str"},
+ "migration_operation_id": {"key": "migrationOperationId", "type": "str"},
+ "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"},
+ "provisioning_error": {"key": "provisioningError", "type": "str"},
+ "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"},
+ "source_database_name": {"key": "sourceDatabaseName", "type": "str"},
+ "source_server_name": {"key": "sourceServerName", "type": "str"},
+ "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"},
+ }
+
+ _subtype_map = {
+ "kind": {
+ "SqlDb": "DatabaseMigrationPropertiesSqlDb",
+ "SqlMi": "DatabaseMigrationPropertiesSqlMi",
+ "SqlVm": "DatabaseMigrationPropertiesSqlVm",
+ }
+ }
+
+ def __init__(
+ self,
+ *,
+ scope: Optional[str] = None,
+ migration_service: Optional[str] = None,
+ migration_operation_id: Optional[str] = None,
+ provisioning_error: Optional[str] = None,
+ source_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
+ source_database_name: Optional[str] = None,
+ target_database_collation: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword scope: Resource Id of the target resource.
+ :paramtype scope: str
+ :keyword migration_service: Resource Id of the Migration Service.
+ :paramtype migration_service: str
+ :keyword migration_operation_id: ID for current migration operation.
+ :paramtype migration_operation_id: str
+ :keyword provisioning_error: Error message for migration provisioning failure, if any.
+ :paramtype provisioning_error: str
+ :keyword source_sql_connection: Source SQL Server connection details.
+ :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
+ :keyword source_database_name: Name of the source database.
+ :paramtype source_database_name: str
+ :keyword target_database_collation: Database collation to be used for the target database.
+ :paramtype target_database_collation: str
+ """
+ super().__init__(
+ scope=scope,
+ migration_service=migration_service,
+ migration_operation_id=migration_operation_id,
+ provisioning_error=provisioning_error,
+ **kwargs
+ )
+ self.kind: str = "DatabaseMigrationProperties"
+ self.source_sql_connection = source_sql_connection
+ self.source_database_name = source_database_name
+ self.source_server_name = None
+ self.target_database_collation = target_database_collation
+
+
+class DatabaseMigrationPropertiesCosmosDbMongo(DatabaseMigrationBaseProperties):
+ """Database Migration Resource properties for CosmosDb for Mongo.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo".
+ :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType
+ :ivar scope: Resource Id of the target resource.
+ :vartype scope: str
+ :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded
+ implies that validations have been performed and migration has started. Known values are:
+ "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled".
+ :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState
+ :ivar migration_status: Migration status.
+ :vartype migration_status: str
+ :ivar started_on: Database migration start time.
+ :vartype started_on: ~datetime.datetime
+ :ivar ended_on: Database migration end time.
+ :vartype ended_on: ~datetime.datetime
:ivar migration_service: Resource Id of the Migration Service.
:vartype migration_service: str
- :ivar migration_operation_id: ID tracking current migration operation.
+ :ivar migration_operation_id: ID for current migration operation.
:vartype migration_operation_id: str
:ivar migration_failure_error: Error details in case of migration failure.
:vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo
+ :ivar provisioning_error: Error message for migration provisioning failure, if any.
+ :vartype provisioning_error: str
+ :ivar source_mongo_connection: Source Mongo connection details.
+ :vartype source_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation
+ :ivar target_mongo_connection: Target Cosmos DB Mongo connection details.
+ :vartype target_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation
+ :ivar collection_list: List of Mongo Collections to be migrated.
+ :vartype collection_list: list[~azure.mgmt.datamigration.models.MongoMigrationCollection]
+ """
+
+ _validation = {
+ "kind": {"required": True},
+ "provisioning_state": {"readonly": True},
+ "migration_status": {"readonly": True},
+ "started_on": {"readonly": True},
+ "ended_on": {"readonly": True},
+ "migration_failure_error": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "kind": {"key": "kind", "type": "str"},
+ "scope": {"key": "scope", "type": "str"},
+ "provisioning_state": {"key": "provisioningState", "type": "str"},
+ "migration_status": {"key": "migrationStatus", "type": "str"},
+ "started_on": {"key": "startedOn", "type": "iso-8601"},
+ "ended_on": {"key": "endedOn", "type": "iso-8601"},
+ "migration_service": {"key": "migrationService", "type": "str"},
+ "migration_operation_id": {"key": "migrationOperationId", "type": "str"},
+ "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"},
+ "provisioning_error": {"key": "provisioningError", "type": "str"},
+ "source_mongo_connection": {"key": "sourceMongoConnection", "type": "MongoConnectionInformation"},
+ "target_mongo_connection": {"key": "targetMongoConnection", "type": "MongoConnectionInformation"},
+ "collection_list": {"key": "collectionList", "type": "[MongoMigrationCollection]"},
+ }
+
+ def __init__(
+ self,
+ *,
+ scope: Optional[str] = None,
+ migration_service: Optional[str] = None,
+ migration_operation_id: Optional[str] = None,
+ provisioning_error: Optional[str] = None,
+ source_mongo_connection: Optional["_models.MongoConnectionInformation"] = None,
+ target_mongo_connection: Optional["_models.MongoConnectionInformation"] = None,
+ collection_list: Optional[List["_models.MongoMigrationCollection"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword scope: Resource Id of the target resource.
+ :paramtype scope: str
+ :keyword migration_service: Resource Id of the Migration Service.
+ :paramtype migration_service: str
+ :keyword migration_operation_id: ID for current migration operation.
+ :paramtype migration_operation_id: str
+ :keyword provisioning_error: Error message for migration provisioning failure, if any.
+ :paramtype provisioning_error: str
+ :keyword source_mongo_connection: Source Mongo connection details.
+ :paramtype source_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation
+ :keyword target_mongo_connection: Target Cosmos DB Mongo connection details.
+ :paramtype target_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation
+ :keyword collection_list: List of Mongo Collections to be migrated.
+ :paramtype collection_list: list[~azure.mgmt.datamigration.models.MongoMigrationCollection]
+ """
+ super().__init__(
+ scope=scope,
+ migration_service=migration_service,
+ migration_operation_id=migration_operation_id,
+ provisioning_error=provisioning_error,
+ **kwargs
+ )
+ self.kind: str = "MongoToCosmosDbMongo"
+ self.source_mongo_connection = source_mongo_connection
+ self.target_mongo_connection = target_mongo_connection
+ self.collection_list = collection_list
+
+
+class DatabaseMigrationPropertiesSqlDb(DatabaseMigrationProperties):
+ """Database Migration Resource properties for SQL database.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo".
+ :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType
+ :ivar scope: Resource Id of the target resource.
+ :vartype scope: str
+ :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded
+ implies that validations have been performed and migration has started. Known values are:
+ "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled".
+ :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState
+ :ivar migration_status: Migration status.
+ :vartype migration_status: str
+ :ivar started_on: Database migration start time.
+ :vartype started_on: ~datetime.datetime
+ :ivar ended_on: Database migration end time.
+ :vartype ended_on: ~datetime.datetime
+ :ivar migration_service: Resource Id of the Migration Service.
+ :vartype migration_service: str
+ :ivar migration_operation_id: ID for current migration operation.
+ :vartype migration_operation_id: str
+ :ivar migration_failure_error: Error details in case of migration failure.
+ :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo
+ :ivar provisioning_error: Error message for migration provisioning failure, if any.
+ :vartype provisioning_error: str
+ :ivar source_sql_connection: Source SQL Server connection details.
+ :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
+ :ivar source_database_name: Name of the source database.
+ :vartype source_database_name: str
+ :ivar source_server_name: Name of the source sql server.
+ :vartype source_server_name: str
:ivar target_database_collation: Database collation to be used for the target database.
:vartype target_database_collation: str
+ :ivar migration_status_details: Detailed migration status. Not included by default.
+ :vartype migration_status_details: ~azure.mgmt.datamigration.models.SqlDbMigrationStatusDetails
+ :ivar target_sql_connection: Target SQL DB connection details.
+ :vartype target_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
+ :ivar offline_configuration: Offline configuration.
+ :vartype offline_configuration: ~azure.mgmt.datamigration.models.SqlDbOfflineConfiguration
+ :ivar table_list: List of tables to copy.
+ :vartype table_list: list[str]
+ """
+
+ _validation = {
+ "kind": {"required": True},
+ "provisioning_state": {"readonly": True},
+ "migration_status": {"readonly": True},
+ "started_on": {"readonly": True},
+ "ended_on": {"readonly": True},
+ "migration_failure_error": {"readonly": True},
+ "source_server_name": {"readonly": True},
+ "migration_status_details": {"readonly": True},
+ "offline_configuration": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "kind": {"key": "kind", "type": "str"},
+ "scope": {"key": "scope", "type": "str"},
+ "provisioning_state": {"key": "provisioningState", "type": "str"},
+ "migration_status": {"key": "migrationStatus", "type": "str"},
+ "started_on": {"key": "startedOn", "type": "iso-8601"},
+ "ended_on": {"key": "endedOn", "type": "iso-8601"},
+ "migration_service": {"key": "migrationService", "type": "str"},
+ "migration_operation_id": {"key": "migrationOperationId", "type": "str"},
+ "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"},
+ "provisioning_error": {"key": "provisioningError", "type": "str"},
+ "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"},
+ "source_database_name": {"key": "sourceDatabaseName", "type": "str"},
+ "source_server_name": {"key": "sourceServerName", "type": "str"},
+ "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"},
+ "migration_status_details": {"key": "migrationStatusDetails", "type": "SqlDbMigrationStatusDetails"},
+ "target_sql_connection": {"key": "targetSqlConnection", "type": "SqlConnectionInformation"},
+ "offline_configuration": {"key": "offlineConfiguration", "type": "SqlDbOfflineConfiguration"},
+ "table_list": {"key": "tableList", "type": "[str]"},
+ }
+
+ def __init__(
+ self,
+ *,
+ scope: Optional[str] = None,
+ migration_service: Optional[str] = None,
+ migration_operation_id: Optional[str] = None,
+ provisioning_error: Optional[str] = None,
+ source_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
+ source_database_name: Optional[str] = None,
+ target_database_collation: Optional[str] = None,
+ target_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
+ table_list: Optional[List[str]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword scope: Resource Id of the target resource.
+ :paramtype scope: str
+ :keyword migration_service: Resource Id of the Migration Service.
+ :paramtype migration_service: str
+ :keyword migration_operation_id: ID for current migration operation.
+ :paramtype migration_operation_id: str
+ :keyword provisioning_error: Error message for migration provisioning failure, if any.
+ :paramtype provisioning_error: str
+ :keyword source_sql_connection: Source SQL Server connection details.
+ :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
+ :keyword source_database_name: Name of the source database.
+ :paramtype source_database_name: str
+ :keyword target_database_collation: Database collation to be used for the target database.
+ :paramtype target_database_collation: str
+ :keyword target_sql_connection: Target SQL DB connection details.
+ :paramtype target_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
+ :keyword table_list: List of tables to copy.
+ :paramtype table_list: list[str]
+ """
+ super().__init__(
+ scope=scope,
+ migration_service=migration_service,
+ migration_operation_id=migration_operation_id,
+ provisioning_error=provisioning_error,
+ source_sql_connection=source_sql_connection,
+ source_database_name=source_database_name,
+ target_database_collation=target_database_collation,
+ **kwargs
+ )
+ self.kind: str = "SqlDb"
+ self.migration_status_details = None
+ self.target_sql_connection = target_sql_connection
+ self.offline_configuration = None
+ self.table_list = table_list
+
+
+class DatabaseMigrationPropertiesSqlMi(DatabaseMigrationProperties):
+ """Database Migration Resource properties for SQL Managed Instance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo".
+ :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType
+ :ivar scope: Resource Id of the target resource.
+ :vartype scope: str
+ :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded
+ implies that validations have been performed and migration has started. Known values are:
+ "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled".
+ :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState
+ :ivar migration_status: Migration status.
+ :vartype migration_status: str
+ :ivar started_on: Database migration start time.
+ :vartype started_on: ~datetime.datetime
+ :ivar ended_on: Database migration end time.
+ :vartype ended_on: ~datetime.datetime
+ :ivar migration_service: Resource Id of the Migration Service.
+ :vartype migration_service: str
+ :ivar migration_operation_id: ID for current migration operation.
+ :vartype migration_operation_id: str
+ :ivar migration_failure_error: Error details in case of migration failure.
+ :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo
:ivar provisioning_error: Error message for migration provisioning failure, if any.
:vartype provisioning_error: str
+ :ivar source_sql_connection: Source SQL Server connection details.
+ :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
+ :ivar source_database_name: Name of the source database.
+ :vartype source_database_name: str
+ :ivar source_server_name: Name of the source sql server.
+ :vartype source_server_name: str
+ :ivar target_database_collation: Database collation to be used for the target database.
+ :vartype target_database_collation: str
:ivar migration_status_details: Detailed migration status. Not included by default.
:vartype migration_status_details: ~azure.mgmt.datamigration.models.MigrationStatusDetails
:ivar backup_configuration: Backup configuration info.
@@ -3911,8 +4410,8 @@ class DatabaseMigrationPropertiesSqlMi(DatabaseMigrationProperties): # pylint:
"migration_status": {"readonly": True},
"started_on": {"readonly": True},
"ended_on": {"readonly": True},
- "source_server_name": {"readonly": True},
"migration_failure_error": {"readonly": True},
+ "source_server_name": {"readonly": True},
"migration_status_details": {"readonly": True},
}
@@ -3923,14 +4422,14 @@ class DatabaseMigrationPropertiesSqlMi(DatabaseMigrationProperties): # pylint:
"migration_status": {"key": "migrationStatus", "type": "str"},
"started_on": {"key": "startedOn", "type": "iso-8601"},
"ended_on": {"key": "endedOn", "type": "iso-8601"},
- "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"},
- "source_database_name": {"key": "sourceDatabaseName", "type": "str"},
- "source_server_name": {"key": "sourceServerName", "type": "str"},
"migration_service": {"key": "migrationService", "type": "str"},
"migration_operation_id": {"key": "migrationOperationId", "type": "str"},
"migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"},
- "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"},
"provisioning_error": {"key": "provisioningError", "type": "str"},
+ "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"},
+ "source_database_name": {"key": "sourceDatabaseName", "type": "str"},
+ "source_server_name": {"key": "sourceServerName", "type": "str"},
+ "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"},
"migration_status_details": {"key": "migrationStatusDetails", "type": "MigrationStatusDetails"},
"backup_configuration": {"key": "backupConfiguration", "type": "BackupConfiguration"},
"offline_configuration": {"key": "offlineConfiguration", "type": "OfflineConfiguration"},
@@ -3940,31 +4439,31 @@ def __init__(
self,
*,
scope: Optional[str] = None,
- source_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
- source_database_name: Optional[str] = None,
migration_service: Optional[str] = None,
migration_operation_id: Optional[str] = None,
- target_database_collation: Optional[str] = None,
provisioning_error: Optional[str] = None,
+ source_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
+ source_database_name: Optional[str] = None,
+ target_database_collation: Optional[str] = None,
backup_configuration: Optional["_models.BackupConfiguration"] = None,
offline_configuration: Optional["_models.OfflineConfiguration"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
- :keyword scope: Resource Id of the target resource (SQL VM or SQL Managed Instance).
+ :keyword scope: Resource Id of the target resource.
:paramtype scope: str
+ :keyword migration_service: Resource Id of the Migration Service.
+ :paramtype migration_service: str
+ :keyword migration_operation_id: ID for current migration operation.
+ :paramtype migration_operation_id: str
+ :keyword provisioning_error: Error message for migration provisioning failure, if any.
+ :paramtype provisioning_error: str
:keyword source_sql_connection: Source SQL Server connection details.
:paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
:keyword source_database_name: Name of the source database.
:paramtype source_database_name: str
- :keyword migration_service: Resource Id of the Migration Service.
- :paramtype migration_service: str
- :keyword migration_operation_id: ID tracking current migration operation.
- :paramtype migration_operation_id: str
:keyword target_database_collation: Database collation to be used for the target database.
:paramtype target_database_collation: str
- :keyword provisioning_error: Error message for migration provisioning failure, if any.
- :paramtype provisioning_error: str
:keyword backup_configuration: Backup configuration info.
:paramtype backup_configuration: ~azure.mgmt.datamigration.models.BackupConfiguration
:keyword offline_configuration: Offline configuration.
@@ -3972,12 +4471,12 @@ def __init__(
"""
super().__init__(
scope=scope,
- source_sql_connection=source_sql_connection,
- source_database_name=source_database_name,
migration_service=migration_service,
migration_operation_id=migration_operation_id,
- target_database_collation=target_database_collation,
provisioning_error=provisioning_error,
+ source_sql_connection=source_sql_connection,
+ source_database_name=source_database_name,
+ target_database_collation=target_database_collation,
**kwargs
)
self.kind: str = "SqlMi"
@@ -3986,42 +4485,43 @@ def __init__(
self.offline_configuration = offline_configuration
-class DatabaseMigrationPropertiesSqlVm(DatabaseMigrationProperties): # pylint: disable=too-many-instance-attributes
+class DatabaseMigrationPropertiesSqlVm(DatabaseMigrationProperties):
"""Database Migration Resource properties for SQL Virtual Machine.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
- :ivar kind: Required. Known values are: "SqlMi", "SqlVm", and "SqlDb".
+ :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo".
:vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType
- :ivar scope: Resource Id of the target resource (SQL VM or SQL Managed Instance).
+ :ivar scope: Resource Id of the target resource.
:vartype scope: str
:ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded
- implies that validations have been performed and migration has started.
- :vartype provisioning_state: str
+ implies that validations have been performed and migration has started. Known values are:
+ "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled".
+ :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState
:ivar migration_status: Migration status.
:vartype migration_status: str
:ivar started_on: Database migration start time.
:vartype started_on: ~datetime.datetime
:ivar ended_on: Database migration end time.
:vartype ended_on: ~datetime.datetime
+ :ivar migration_service: Resource Id of the Migration Service.
+ :vartype migration_service: str
+ :ivar migration_operation_id: ID for current migration operation.
+ :vartype migration_operation_id: str
+ :ivar migration_failure_error: Error details in case of migration failure.
+ :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo
+ :ivar provisioning_error: Error message for migration provisioning failure, if any.
+ :vartype provisioning_error: str
:ivar source_sql_connection: Source SQL Server connection details.
:vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
:ivar source_database_name: Name of the source database.
:vartype source_database_name: str
:ivar source_server_name: Name of the source sql server.
:vartype source_server_name: str
- :ivar migration_service: Resource Id of the Migration Service.
- :vartype migration_service: str
- :ivar migration_operation_id: ID tracking current migration operation.
- :vartype migration_operation_id: str
- :ivar migration_failure_error: Error details in case of migration failure.
- :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo
:ivar target_database_collation: Database collation to be used for the target database.
:vartype target_database_collation: str
- :ivar provisioning_error: Error message for migration provisioning failure, if any.
- :vartype provisioning_error: str
:ivar migration_status_details: Detailed migration status. Not included by default.
:vartype migration_status_details: ~azure.mgmt.datamigration.models.MigrationStatusDetails
:ivar backup_configuration: Backup configuration info.
@@ -4036,8 +4536,8 @@ class DatabaseMigrationPropertiesSqlVm(DatabaseMigrationProperties): # pylint:
"migration_status": {"readonly": True},
"started_on": {"readonly": True},
"ended_on": {"readonly": True},
- "source_server_name": {"readonly": True},
"migration_failure_error": {"readonly": True},
+ "source_server_name": {"readonly": True},
"migration_status_details": {"readonly": True},
}
@@ -4048,14 +4548,14 @@ class DatabaseMigrationPropertiesSqlVm(DatabaseMigrationProperties): # pylint:
"migration_status": {"key": "migrationStatus", "type": "str"},
"started_on": {"key": "startedOn", "type": "iso-8601"},
"ended_on": {"key": "endedOn", "type": "iso-8601"},
- "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"},
- "source_database_name": {"key": "sourceDatabaseName", "type": "str"},
- "source_server_name": {"key": "sourceServerName", "type": "str"},
"migration_service": {"key": "migrationService", "type": "str"},
"migration_operation_id": {"key": "migrationOperationId", "type": "str"},
"migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"},
- "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"},
"provisioning_error": {"key": "provisioningError", "type": "str"},
+ "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"},
+ "source_database_name": {"key": "sourceDatabaseName", "type": "str"},
+ "source_server_name": {"key": "sourceServerName", "type": "str"},
+ "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"},
"migration_status_details": {"key": "migrationStatusDetails", "type": "MigrationStatusDetails"},
"backup_configuration": {"key": "backupConfiguration", "type": "BackupConfiguration"},
"offline_configuration": {"key": "offlineConfiguration", "type": "OfflineConfiguration"},
@@ -4065,31 +4565,31 @@ def __init__(
self,
*,
scope: Optional[str] = None,
- source_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
- source_database_name: Optional[str] = None,
migration_service: Optional[str] = None,
migration_operation_id: Optional[str] = None,
- target_database_collation: Optional[str] = None,
provisioning_error: Optional[str] = None,
+ source_sql_connection: Optional["_models.SqlConnectionInformation"] = None,
+ source_database_name: Optional[str] = None,
+ target_database_collation: Optional[str] = None,
backup_configuration: Optional["_models.BackupConfiguration"] = None,
offline_configuration: Optional["_models.OfflineConfiguration"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
- :keyword scope: Resource Id of the target resource (SQL VM or SQL Managed Instance).
+ :keyword scope: Resource Id of the target resource.
:paramtype scope: str
+ :keyword migration_service: Resource Id of the Migration Service.
+ :paramtype migration_service: str
+ :keyword migration_operation_id: ID for current migration operation.
+ :paramtype migration_operation_id: str
+ :keyword provisioning_error: Error message for migration provisioning failure, if any.
+ :paramtype provisioning_error: str
:keyword source_sql_connection: Source SQL Server connection details.
:paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation
:keyword source_database_name: Name of the source database.
:paramtype source_database_name: str
- :keyword migration_service: Resource Id of the Migration Service.
- :paramtype migration_service: str
- :keyword migration_operation_id: ID tracking current migration operation.
- :paramtype migration_operation_id: str
:keyword target_database_collation: Database collation to be used for the target database.
:paramtype target_database_collation: str
- :keyword provisioning_error: Error message for migration provisioning failure, if any.
- :paramtype provisioning_error: str
:keyword backup_configuration: Backup configuration info.
:paramtype backup_configuration: ~azure.mgmt.datamigration.models.BackupConfiguration
:keyword offline_configuration: Offline configuration.
@@ -4097,12 +4597,12 @@ def __init__(
"""
super().__init__(
scope=scope,
- source_sql_connection=source_sql_connection,
- source_database_name=source_database_name,
migration_service=migration_service,
migration_operation_id=migration_operation_id,
- target_database_collation=target_database_collation,
provisioning_error=provisioning_error,
+ source_sql_connection=source_sql_connection,
+ source_database_name=source_database_name,
+ target_database_collation=target_database_collation,
**kwargs
)
self.kind: str = "SqlVm"
@@ -4111,7 +4611,45 @@ def __init__(
self.offline_configuration = offline_configuration
-class DatabaseMigrationSqlDb(ProxyResource):
+class ProxyResourceAutoGenerated(_serialization.Model):
+ """ProxyResourceAutoGenerated.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id:
+ :vartype id: str
+ :ivar name:
+ :vartype name: str
+ :ivar type:
+ :vartype type: str
+ :ivar system_data: Metadata pertaining to creation and last modification of the resource.
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
+ """
+
+ _validation = {
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.system_data = None
+
+
+class DatabaseMigrationSqlDb(ProxyResourceAutoGenerated):
"""Database Migration Resource for SQL Database.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -4123,7 +4661,7 @@ class DatabaseMigrationSqlDb(ProxyResource):
:ivar type:
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
:ivar properties: Database Migration Resource properties for SQL database.
:vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlDb
"""
@@ -4139,21 +4677,22 @@ class DatabaseMigrationSqlDb(ProxyResource):
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
"properties": {"key": "properties", "type": "DatabaseMigrationPropertiesSqlDb"},
}
- def __init__(self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlDb"] = None, **kwargs):
+ def __init__(
+ self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlDb"] = None, **kwargs: Any
+ ) -> None:
"""
:keyword properties: Database Migration Resource properties for SQL database.
:paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlDb
"""
super().__init__(**kwargs)
- self.system_data = None
self.properties = properties
-class DatabaseMigrationSqlMi(ProxyResource):
+class DatabaseMigrationSqlMi(ProxyResourceAutoGenerated):
"""Database Migration Resource for SQL Managed Instance.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -4165,7 +4704,7 @@ class DatabaseMigrationSqlMi(ProxyResource):
:ivar type:
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
:ivar properties: Database Migration Resource properties for SQL Managed Instance.
:vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlMi
"""
@@ -4181,21 +4720,22 @@ class DatabaseMigrationSqlMi(ProxyResource):
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
"properties": {"key": "properties", "type": "DatabaseMigrationPropertiesSqlMi"},
}
- def __init__(self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlMi"] = None, **kwargs):
+ def __init__(
+ self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlMi"] = None, **kwargs: Any
+ ) -> None:
"""
:keyword properties: Database Migration Resource properties for SQL Managed Instance.
:paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlMi
"""
super().__init__(**kwargs)
- self.system_data = None
self.properties = properties
-class DatabaseMigrationSqlVm(ProxyResource):
+class DatabaseMigrationSqlVm(ProxyResourceAutoGenerated):
"""Database Migration Resource for SQL Virtual Machine.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -4207,7 +4747,7 @@ class DatabaseMigrationSqlVm(ProxyResource):
:ivar type:
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
:ivar properties: Database Migration Resource properties for SQL Virtual Machine.
:vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlVm
"""
@@ -4223,17 +4763,18 @@ class DatabaseMigrationSqlVm(ProxyResource):
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
"properties": {"key": "properties", "type": "DatabaseMigrationPropertiesSqlVm"},
}
- def __init__(self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlVm"] = None, **kwargs):
+ def __init__(
+ self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlVm"] = None, **kwargs: Any
+ ) -> None:
"""
:keyword properties: Database Migration Resource properties for SQL Virtual Machine.
:paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlVm
"""
super().__init__(**kwargs)
- self.system_data = None
self.properties = properties
@@ -4266,7 +4807,7 @@ class DatabaseObjectName(_serialization.Model):
"object_type": {"key": "objectType", "type": "str"},
}
- def __init__(self, *, object_type: Optional[Union[str, "_models.ObjectType"]] = None, **kwargs):
+ def __init__(self, *, object_type: Optional[Union[str, "_models.ObjectType"]] = None, **kwargs: Any) -> None:
"""
:keyword object_type: Type of the object in the database. Known values are: "StoredProcedures",
"Table", "User", "View", and "Function".
@@ -4330,7 +4871,7 @@ class DataItemMigrationSummaryResult(_serialization.Model):
"result_prefix": {"key": "resultPrefix", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.name = None
@@ -4399,7 +4940,7 @@ class DatabaseSummaryResult(DataItemMigrationSummaryResult):
"size_mb": {"key": "sizeMB", "type": "float"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.size_mb = None
@@ -4426,7 +4967,7 @@ class DatabaseTable(_serialization.Model):
"name": {"key": "name", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.has_rows = None
@@ -4453,8 +4994,8 @@ def __init__(
*,
failed_objects: Optional[Dict[str, str]] = None,
validation_errors: Optional["_models.ValidationError"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword failed_objects: List of failed table names of source and target pair.
:paramtype failed_objects: dict[str, str]
@@ -4487,7 +5028,7 @@ class DataMigrationError(_serialization.Model):
"type": {"key": "type", "type": "str"},
}
- def __init__(self, *, type: Optional[Union[str, "_models.ErrorType"]] = None, **kwargs):
+ def __init__(self, *, type: Optional[Union[str, "_models.ErrorType"]] = None, **kwargs: Any) -> None:
"""
:keyword type: Error type. Known values are: "Default", "Warning", and "Error".
:paramtype type: str or ~azure.mgmt.datamigration.models.ErrorType
@@ -4543,7 +5084,7 @@ class DataMigrationProjectMetadata(_serialization.Model):
"selected_migration_tables": {"key": "selectedMigrationTables", "type": "[MigrationTableMetadata]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.source_server_name = None
@@ -4556,8 +5097,8 @@ def __init__(self, **kwargs):
self.selected_migration_tables = None
-class TrackedResource(_serialization.Model):
- """TrackedResource.
+class TrackedResourceAutoGenerated(_serialization.Model):
+ """TrackedResourceAutoGenerated.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -4572,7 +5113,7 @@ class TrackedResource(_serialization.Model):
:ivar type:
:vartype type: str
:ivar system_data:
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
"""
_validation = {
@@ -4588,10 +5129,10 @@ class TrackedResource(_serialization.Model):
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
}
- def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs):
+ def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
:keyword location:
:paramtype location: str
@@ -4607,8 +5148,8 @@ def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, s
self.system_data = None
-class DataMigrationService(TrackedResource): # pylint: disable=too-many-instance-attributes
- """A Database Migration Service resource.
+class DataMigrationService(TrackedResourceAutoGenerated):
+ """An Azure Database Migration Service (classic) resource.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -4623,7 +5164,7 @@ class DataMigrationService(TrackedResource): # pylint: disable=too-many-instanc
:ivar type:
:vartype type: str
:ivar system_data:
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
:ivar etag: HTTP strong entity tag value. Ignored if submitted.
:vartype etag: str
:ivar kind: The resource kind. Only 'vm' (the default) is supported.
@@ -4663,7 +5204,7 @@ class DataMigrationService(TrackedResource): # pylint: disable=too-many-instanc
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
"etag": {"key": "etag", "type": "str"},
"kind": {"key": "kind", "type": "str"},
"sku": {"key": "sku", "type": "ServiceSku"},
@@ -4688,8 +5229,8 @@ def __init__(
virtual_nic_id: Optional[str] = None,
auto_stop_delay: Optional[str] = None,
delete_resources_on_stop: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword location:
:paramtype location: str
@@ -4743,8 +5284,12 @@ class DataMigrationServiceList(_serialization.Model):
}
def __init__(
- self, *, value: Optional[List["_models.DataMigrationService"]] = None, next_link: Optional[str] = None, **kwargs
- ):
+ self,
+ *,
+ value: Optional[List["_models.DataMigrationService"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword value: List of services.
:paramtype value: list[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -4789,8 +5334,8 @@ def __init__(
status: Optional[str] = None,
vm_size: Optional[str] = None,
supported_task_types: Optional[List[str]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword agent_version: The DMS instance agent version.
:paramtype agent_version: str
@@ -4827,7 +5372,9 @@ class DeleteNode(_serialization.Model):
"integration_runtime_name": {"key": "integrationRuntimeName", "type": "str"},
}
- def __init__(self, *, node_name: Optional[str] = None, integration_runtime_name: Optional[str] = None, **kwargs):
+ def __init__(
+ self, *, node_name: Optional[str] = None, integration_runtime_name: Optional[str] = None, **kwargs: Any
+ ) -> None:
"""
:keyword node_name: The name of node to delete.
:paramtype node_name: str
@@ -4839,6 +5386,77 @@ def __init__(self, *, node_name: Optional[str] = None, integration_runtime_name:
self.integration_runtime_name = integration_runtime_name
+class ErrorAdditionalInfo(_serialization.Model):
+ """The resource management error additional info.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The additional info type.
+ :vartype type: str
+ :ivar info: The additional info.
+ :vartype info: JSON
+ """
+
+ _validation = {
+ "type": {"readonly": True},
+ "info": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "type": {"key": "type", "type": "str"},
+ "info": {"key": "info", "type": "object"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.type = None
+ self.info = None
+
+
+class ErrorDetail(_serialization.Model):
+ """The error detail.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: The error code.
+ :vartype code: str
+ :ivar message: The error message.
+ :vartype message: str
+ :ivar target: The error target.
+ :vartype target: str
+ :ivar details: The error details.
+ :vartype details: list[~azure.mgmt.datamigration.models.ErrorDetail]
+ :ivar additional_info: The error additional info.
+ :vartype additional_info: list[~azure.mgmt.datamigration.models.ErrorAdditionalInfo]
+ """
+
+ _validation = {
+ "code": {"readonly": True},
+ "message": {"readonly": True},
+ "target": {"readonly": True},
+ "details": {"readonly": True},
+ "additional_info": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "code": {"key": "code", "type": "str"},
+ "message": {"key": "message", "type": "str"},
+ "target": {"key": "target", "type": "str"},
+ "details": {"key": "details", "type": "[ErrorDetail]"},
+ "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.target = None
+ self.details = None
+ self.additional_info = None
+
+
class ErrorInfo(_serialization.Model):
"""Error details.
@@ -4860,13 +5478,34 @@ class ErrorInfo(_serialization.Model):
"message": {"key": "message", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.code = None
self.message = None
+class ErrorResponse(_serialization.Model):
+ """Common error response for all Azure Resource Manager APIs to return error details for failed
+ operations. (This also follows the OData error response format.).
+
+ :ivar error: The error object.
+ :vartype error: ~azure.mgmt.datamigration.models.ErrorDetail
+ """
+
+ _attribute_map = {
+ "error": {"key": "error", "type": "ErrorDetail"},
+ }
+
+ def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: Any) -> None:
+ """
+ :keyword error: The error object.
+ :paramtype error: ~azure.mgmt.datamigration.models.ErrorDetail
+ """
+ super().__init__(**kwargs)
+ self.error = error
+
+
class ExecutionStatistics(_serialization.Model):
"""Description about the errors happen while performing migration validation.
@@ -4902,8 +5541,8 @@ def __init__(
wait_stats: Optional[Dict[str, "_models.WaitStatistics"]] = None,
has_errors: Optional[bool] = None,
sql_errors: Optional[List[str]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword execution_count: No. of query executions.
:paramtype execution_count: int
@@ -4943,8 +5582,8 @@ class FileList(_serialization.Model):
}
def __init__(
- self, *, value: Optional[List["_models.ProjectFile"]] = None, next_link: Optional[str] = None, **kwargs
- ):
+ self, *, value: Optional[List["_models.ProjectFile"]] = None, next_link: Optional[str] = None, **kwargs: Any
+ ) -> None:
"""
:keyword value: List of files.
:paramtype value: list[~azure.mgmt.datamigration.models.ProjectFile]
@@ -4959,7 +5598,7 @@ def __init__(
class FileShare(_serialization.Model):
"""File share information with Path, Username, and Password.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar user_name: User name credential to connect to the share location.
:vartype user_name: str
@@ -4979,7 +5618,9 @@ class FileShare(_serialization.Model):
"path": {"key": "path", "type": "str"},
}
- def __init__(self, *, path: str, user_name: Optional[str] = None, password: Optional[str] = None, **kwargs):
+ def __init__(
+ self, *, path: str, user_name: Optional[str] = None, password: Optional[str] = None, **kwargs: Any
+ ) -> None:
"""
:keyword user_name: User name credential to connect to the share location.
:paramtype user_name: str
@@ -5008,7 +5649,7 @@ class FileStorageInfo(_serialization.Model):
"headers": {"key": "headers", "type": "{str}"},
}
- def __init__(self, *, uri: Optional[str] = None, headers: Optional[Dict[str, str]] = None, **kwargs):
+ def __init__(self, *, uri: Optional[str] = None, headers: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
:keyword uri: A URI that can be used to access the file content.
:paramtype uri: str
@@ -5023,7 +5664,7 @@ def __init__(self, *, uri: Optional[str] = None, headers: Optional[Dict[str, str
class GetProjectDetailsNonSqlTaskInput(_serialization.Model):
"""Input for the task that reads configuration from project artifacts.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar project_name: Name of the migration project. Required.
:vartype project_name: str
@@ -5042,7 +5683,7 @@ class GetProjectDetailsNonSqlTaskInput(_serialization.Model):
"project_location": {"key": "projectLocation", "type": "str"},
}
- def __init__(self, *, project_name: str, project_location: str, **kwargs):
+ def __init__(self, *, project_name: str, project_location: str, **kwargs: Any) -> None:
"""
:keyword project_name: Name of the migration project. Required.
:paramtype project_name: str
@@ -5058,7 +5699,7 @@ def __init__(self, *, project_name: str, project_location: str, **kwargs):
class GetTdeCertificatesSqlTaskInput(_serialization.Model):
"""Input for the task that gets TDE certificates in Base64 encoded format.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar connection_info: Connection information for SQL Server. Required.
:vartype connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -5088,8 +5729,8 @@ def __init__(
connection_info: "_models.SqlConnectionInfo",
backup_file_share: "_models.FileShare",
selected_certificates: List["_models.SelectedCertificateInput"],
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword connection_info: Connection information for SQL Server. Required.
:paramtype connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -5128,7 +5769,7 @@ class GetTdeCertificatesSqlTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.base64_encoded_certificates = None
@@ -5140,7 +5781,7 @@ class GetTdeCertificatesSqlTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -5199,8 +5840,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.GetTdeCertificatesSqlTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -5216,7 +5857,7 @@ def __init__(
class GetUserTablesMySqlTaskInput(_serialization.Model):
"""Input for the task that collects user tables for the given list of databases.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar connection_info: Connection information for SQL Server. Required.
:vartype connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -5234,7 +5875,9 @@ class GetUserTablesMySqlTaskInput(_serialization.Model):
"selected_databases": {"key": "selectedDatabases", "type": "[str]"},
}
- def __init__(self, *, connection_info: "_models.MySqlConnectionInfo", selected_databases: List[str], **kwargs):
+ def __init__(
+ self, *, connection_info: "_models.MySqlConnectionInfo", selected_databases: List[str], **kwargs: Any
+ ) -> None:
"""
:keyword connection_info: Connection information for SQL Server. Required.
:paramtype connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -5271,7 +5914,7 @@ class GetUserTablesMySqlTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -5284,7 +5927,7 @@ class GetUserTablesMySqlTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -5343,8 +5986,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.GetUserTablesMySqlTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -5358,9 +6001,10 @@ def __init__(
class GetUserTablesOracleTaskInput(_serialization.Model):
- """Input for the task that gets the list of tables contained within a provided list of Oracle schemas.
+ """Input for the task that gets the list of tables contained within a provided list of Oracle
+ schemas.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar connection_info: Information for connecting to Oracle source. Required.
:vartype connection_info: ~azure.mgmt.datamigration.models.OracleConnectionInfo
@@ -5378,7 +6022,9 @@ class GetUserTablesOracleTaskInput(_serialization.Model):
"selected_schemas": {"key": "selectedSchemas", "type": "[str]"},
}
- def __init__(self, *, connection_info: "_models.OracleConnectionInfo", selected_schemas: List[str], **kwargs):
+ def __init__(
+ self, *, connection_info: "_models.OracleConnectionInfo", selected_schemas: List[str], **kwargs: Any
+ ) -> None:
"""
:keyword connection_info: Information for connecting to Oracle source. Required.
:paramtype connection_info: ~azure.mgmt.datamigration.models.OracleConnectionInfo
@@ -5391,7 +6037,8 @@ def __init__(self, *, connection_info: "_models.OracleConnectionInfo", selected_
class GetUserTablesOracleTaskOutput(_serialization.Model):
- """Output for the task that gets the list of tables contained within a provided list of Oracle schemas.
+ """Output for the task that gets the list of tables contained within a provided list of Oracle
+ schemas.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -5415,7 +6062,7 @@ class GetUserTablesOracleTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.schema_name = None
@@ -5428,7 +6075,7 @@ class GetUserTablesOracleTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -5487,8 +6134,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.GetUserTablesOracleTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -5504,7 +6151,7 @@ def __init__(
class GetUserTablesPostgreSqlTaskInput(_serialization.Model):
"""Input for the task that gets the list of tables for a provided list of PostgreSQL databases.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar connection_info: Information for connecting to PostgreSQL source. Required.
:vartype connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo
@@ -5522,7 +6169,9 @@ class GetUserTablesPostgreSqlTaskInput(_serialization.Model):
"selected_databases": {"key": "selectedDatabases", "type": "[str]"},
}
- def __init__(self, *, connection_info: "_models.PostgreSqlConnectionInfo", selected_databases: List[str], **kwargs):
+ def __init__(
+ self, *, connection_info: "_models.PostgreSqlConnectionInfo", selected_databases: List[str], **kwargs: Any
+ ) -> None:
"""
:keyword connection_info: Information for connecting to PostgreSQL source. Required.
:paramtype connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo
@@ -5560,7 +6209,7 @@ class GetUserTablesPostgreSqlTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.database_name = None
@@ -5573,7 +6222,7 @@ class GetUserTablesPostgreSqlTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -5632,8 +6281,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.GetUserTablesPostgreSqlTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -5649,7 +6298,7 @@ def __init__(
class GetUserTablesSqlSyncTaskInput(_serialization.Model):
"""Input for the task that collects user tables for the given list of databases.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Connection information for SQL Server. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -5682,8 +6331,8 @@ def __init__(
target_connection_info: "_models.SqlConnectionInfo",
selected_source_databases: List[str],
selected_target_databases: List[str],
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Connection information for SQL Server. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -5732,7 +6381,7 @@ class GetUserTablesSqlSyncTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.databases_to_source_tables = None
@@ -5746,7 +6395,7 @@ class GetUserTablesSqlSyncTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -5805,8 +6454,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.GetUserTablesSqlSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -5822,7 +6471,7 @@ def __init__(
class GetUserTablesSqlTaskInput(_serialization.Model):
"""Input for the task that collects user tables for the given list of databases.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar connection_info: Connection information for SQL Server. Required.
:vartype connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -5849,8 +6498,8 @@ def __init__(
connection_info: "_models.SqlConnectionInfo",
selected_databases: List[str],
encrypted_key_for_secure_fields: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword connection_info: Connection information for SQL Server. Required.
:paramtype connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -5890,7 +6539,7 @@ class GetUserTablesSqlTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -5903,7 +6552,7 @@ class GetUserTablesSqlTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -5966,8 +6615,8 @@ def __init__(
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.GetUserTablesSqlTaskInput"] = None,
task_id: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -5994,7 +6643,7 @@ class InstallOCIDriverTaskInput(_serialization.Model):
"driver_package_name": {"key": "driverPackageName", "type": "str"},
}
- def __init__(self, *, driver_package_name: Optional[str] = None, **kwargs):
+ def __init__(self, *, driver_package_name: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword driver_package_name: Name of the uploaded driver package to install.
:paramtype driver_package_name: str
@@ -6020,7 +6669,7 @@ class InstallOCIDriverTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.validation_errors = None
@@ -6031,7 +6680,7 @@ class InstallOCIDriverTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -6090,8 +6739,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.InstallOCIDriverTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -6125,17 +6774,81 @@ class IntegrationRuntimeMonitoringData(_serialization.Model):
"nodes": {"key": "nodes", "type": "[NodeMonitoringData]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.name = None
self.nodes = None
+class ManagedServiceIdentity(_serialization.Model):
+ """Managed service identity (system assigned and/or user assigned identities).
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar principal_id: The service principal ID of the system assigned identity. This property
+ will only be provided for a system assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be
+ provided for a system assigned identity.
+ :vartype tenant_id: str
+ :ivar type: Type of managed service identity (where both SystemAssigned and UserAssigned types
+ are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and
+ "SystemAssigned,UserAssigned".
+ :vartype type: str or ~azure.mgmt.datamigration.models.ManagedServiceIdentityType
+ :ivar user_assigned_identities: The set of user assigned identities associated with the
+ resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form:
+ '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long
+ The dictionary values can be empty objects ({}) in requests.
+ :vartype user_assigned_identities: dict[str,
+ ~azure.mgmt.datamigration.models.UserAssignedIdentity]
+ """
+
+ _validation = {
+ "principal_id": {"readonly": True},
+ "tenant_id": {"readonly": True},
+ "type": {"required": True},
+ }
+
+ _attribute_map = {
+ "principal_id": {"key": "principalId", "type": "str"},
+ "tenant_id": {"key": "tenantId", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ type: Union[str, "_models.ManagedServiceIdentityType"],
+ user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword type: Type of managed service identity (where both SystemAssigned and UserAssigned
+ types are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and
+ "SystemAssigned,UserAssigned".
+ :paramtype type: str or ~azure.mgmt.datamigration.models.ManagedServiceIdentityType
+ :keyword user_assigned_identities: The set of user assigned identities associated with the
+ resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form:
+ '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long
+ The dictionary values can be empty objects ({}) in requests.
+ :paramtype user_assigned_identities: dict[str,
+ ~azure.mgmt.datamigration.models.UserAssignedIdentity]
+ """
+ super().__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = type
+ self.user_assigned_identities = user_assigned_identities
+
+
class MigrateMISyncCompleteCommandInput(_serialization.Model):
"""Input for command that completes online migration for an Azure SQL Database Managed Instance.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_database_name: Name of managed instance database. Required.
:vartype source_database_name: str
@@ -6149,7 +6862,7 @@ class MigrateMISyncCompleteCommandInput(_serialization.Model):
"source_database_name": {"key": "sourceDatabaseName", "type": "str"},
}
- def __init__(self, *, source_database_name: str, **kwargs):
+ def __init__(self, *, source_database_name: str, **kwargs: Any) -> None:
"""
:keyword source_database_name: Name of managed instance database. Required.
:paramtype source_database_name: str
@@ -6169,7 +6882,7 @@ class MigrateMISyncCompleteCommandOutput(_serialization.Model):
"errors": {"key": "errors", "type": "[ReportableException]"},
}
- def __init__(self, *, errors: Optional[List["_models.ReportableException"]] = None, **kwargs):
+ def __init__(self, *, errors: Optional[List["_models.ReportableException"]] = None, **kwargs: Any) -> None:
"""
:keyword errors: List of errors that happened during the command execution.
:paramtype errors: list[~azure.mgmt.datamigration.models.ReportableException]
@@ -6179,11 +6892,12 @@ def __init__(self, *, errors: Optional[List["_models.ReportableException"]] = No
class MigrateMISyncCompleteCommandProperties(CommandProperties):
- """Properties for the command that completes online migration for an Azure SQL Database Managed Instance.
+ """Properties for the command that completes online migration for an Azure SQL Database Managed
+ Instance.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database",
"Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart".
@@ -6214,7 +6928,7 @@ class MigrateMISyncCompleteCommandProperties(CommandProperties):
"output": {"key": "output", "type": "MigrateMISyncCompleteCommandOutput"},
}
- def __init__(self, *, input: Optional["_models.MigrateMISyncCompleteCommandInput"] = None, **kwargs):
+ def __init__(self, *, input: Optional["_models.MigrateMISyncCompleteCommandInput"] = None, **kwargs: Any) -> None:
"""
:keyword input: Command input.
:paramtype input: ~azure.mgmt.datamigration.models.MigrateMISyncCompleteCommandInput
@@ -6230,7 +6944,7 @@ class MigrateMongoDbTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -6289,8 +7003,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MongoDbMigrationSettings"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -6303,8 +7017,9 @@ def __init__(
self.output = None
-class MigrateMySqlAzureDbForMySqlOfflineDatabaseInput(_serialization.Model):
- """Database specific information for offline MySQL to Azure Database for MySQL migration task inputs.
+class MigrateMySqlAzureDbForMySqlOfflineDatabaseInput(_serialization.Model): # pylint: disable=name-too-long
+ """Database specific information for offline MySQL to Azure Database for MySQL migration task
+ inputs.
:ivar name: Name of the database.
:vartype name: str
@@ -6327,8 +7042,8 @@ def __init__(
name: Optional[str] = None,
target_database_name: Optional[str] = None,
table_map: Optional[Dict[str, str]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of the database.
:paramtype name: str
@@ -6344,10 +7059,11 @@ def __init__(
self.table_map = table_map
-class MigrateMySqlAzureDbForMySqlOfflineTaskInput(_serialization.Model):
- """Input for the task that migrates MySQL databases to Azure Database for MySQL for offline migrations.
+class MigrateMySqlAzureDbForMySqlOfflineTaskInput(_serialization.Model): # pylint: disable=name-too-long
+ """Input for the task that migrates MySQL databases to Azure Database for MySQL for offline
+ migrations.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Connection information for source MySQL. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -6394,8 +7110,8 @@ def __init__(
started_on: Optional[datetime.datetime] = None,
optional_agent_settings: Optional[Dict[str, str]] = None,
encrypted_key_for_secure_fields: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Connection information for source MySQL. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -6425,8 +7141,9 @@ def __init__(
self.encrypted_key_for_secure_fields = encrypted_key_for_secure_fields
-class MigrateMySqlAzureDbForMySqlOfflineTaskOutput(_serialization.Model):
- """Output for the task that migrates MySQL databases to Azure Database for MySQL for offline migrations.
+class MigrateMySqlAzureDbForMySqlOfflineTaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for the task that migrates MySQL databases to Azure Database for MySQL for offline
+ migrations.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel,
@@ -6436,7 +7153,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -6463,7 +7180,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -6472,12 +7189,12 @@ def __init__(self, **kwargs):
class MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel(
MigrateMySqlAzureDbForMySqlOfflineTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -6558,7 +7275,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel(
"object_summary": {"key": "objectSummary", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -6579,12 +7296,14 @@ def __init__(self, **kwargs):
self.object_summary = None
-class MigrateMySqlAzureDbForMySqlOfflineTaskOutputError(MigrateMySqlAzureDbForMySqlOfflineTaskOutput):
+class MigrateMySqlAzureDbForMySqlOfflineTaskOutputError(
+ MigrateMySqlAzureDbForMySqlOfflineTaskOutput
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlOfflineTaskOutputError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -6606,7 +7325,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutputError(MigrateMySqlAzureDbForMy
"error": {"key": "error", "type": "ReportableException"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "ErrorOutput"
@@ -6615,12 +7334,12 @@ def __init__(self, **kwargs):
class MigrateMySqlAzureDbForMySqlOfflineTaskOutputMigrationLevel(
MigrateMySqlAzureDbForMySqlOfflineTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlOfflineTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -6704,8 +7423,8 @@ def __init__(
*,
databases: Optional[str] = None,
migration_report_result: Optional["_models.MigrationReportResult"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword databases: Selected databases as a map from database name to database id.
:paramtype databases: str
@@ -6734,12 +7453,12 @@ def __init__(
class MigrateMySqlAzureDbForMySqlOfflineTaskOutputTableLevel(
MigrateMySqlAzureDbForMySqlOfflineTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlOfflineTaskOutputTableLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -6799,7 +7518,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutputTableLevel(
"last_storage_update": {"key": "lastStorageUpdate", "type": "iso-8601"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "TableLevelOutput"
@@ -6815,12 +7534,13 @@ def __init__(self, **kwargs):
self.last_storage_update = None
-class MigrateMySqlAzureDbForMySqlOfflineTaskProperties(ProjectTaskProperties):
- """Properties for the task that migrates MySQL databases to Azure Database for MySQL for offline migrations.
+class MigrateMySqlAzureDbForMySqlOfflineTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that migrates MySQL databases to Azure Database for MySQL for offline
+ migrations.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -6888,8 +7608,8 @@ def __init__(
input: Optional["_models.MigrateMySqlAzureDbForMySqlOfflineTaskInput"] = None,
is_cloneable: Optional[bool] = None,
task_id: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -6908,7 +7628,7 @@ def __init__(
self.task_id = task_id
-class MigrateMySqlAzureDbForMySqlSyncDatabaseInput(_serialization.Model):
+class MigrateMySqlAzureDbForMySqlSyncDatabaseInput(_serialization.Model): # pylint: disable=name-too-long
"""Database specific information for MySQL to Azure Database for MySQL migration task inputs.
:ivar name: Name of the database.
@@ -6944,8 +7664,8 @@ def __init__(
source_setting: Optional[Dict[str, str]] = None,
target_setting: Optional[Dict[str, str]] = None,
table_map: Optional[Dict[str, str]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of the database.
:paramtype name: str
@@ -6971,9 +7691,10 @@ def __init__(
class MigrateMySqlAzureDbForMySqlSyncTaskInput(_serialization.Model):
- """Input for the task that migrates MySQL databases to Azure Database for MySQL for online migrations.
+ """Input for the task that migrates MySQL databases to Azure Database for MySQL for online
+ migrations.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Connection information for source MySQL. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -7003,8 +7724,8 @@ def __init__(
source_connection_info: "_models.MySqlConnectionInfo",
target_connection_info: "_models.MySqlConnectionInfo",
selected_databases: List["_models.MigrateMySqlAzureDbForMySqlSyncDatabaseInput"],
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Connection information for source MySQL. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo
@@ -7021,8 +7742,9 @@ def __init__(
self.selected_databases = selected_databases
-class MigrateMySqlAzureDbForMySqlSyncTaskOutput(_serialization.Model):
- """Output for the task that migrates MySQL databases to Azure Database for MySQL for online migrations.
+class MigrateMySqlAzureDbForMySqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for the task that migrates MySQL databases to Azure Database for MySQL for online
+ migrations.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError,
@@ -7033,7 +7755,7 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7061,19 +7783,21 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.result_type: Optional[str] = None
-class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError(MigrateMySqlAzureDbForMySqlSyncTaskOutput):
+class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError(
+ MigrateMySqlAzureDbForMySqlSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7102,8 +7826,8 @@ def __init__(
*,
error_message: Optional[str] = None,
events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword error_message: Error message.
:paramtype error_message: str
@@ -7118,12 +7842,12 @@ def __init__(
class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevel(
MigrateMySqlAzureDbForMySqlSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7206,7 +7930,7 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevel(
"latency": {"key": "latency", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -7227,12 +7951,14 @@ def __init__(self, **kwargs):
self.latency = None
-class MigrateMySqlAzureDbForMySqlSyncTaskOutputError(MigrateMySqlAzureDbForMySqlSyncTaskOutput):
+class MigrateMySqlAzureDbForMySqlSyncTaskOutputError(
+ MigrateMySqlAzureDbForMySqlSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlSyncTaskOutputError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7254,19 +7980,21 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutputError(MigrateMySqlAzureDbForMySql
"error": {"key": "error", "type": "ReportableException"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "ErrorOutput"
self.error = None
-class MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel(MigrateMySqlAzureDbForMySqlSyncTaskOutput):
+class MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel(
+ MigrateMySqlAzureDbForMySqlSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7308,7 +8036,7 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel(MigrateMySqlAzureD
"target_server": {"key": "targetServer", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "MigrationLevelOutput"
@@ -7322,12 +8050,12 @@ def __init__(self, **kwargs):
class MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevel(
MigrateMySqlAzureDbForMySqlSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7398,7 +8126,7 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevel(
"last_modified_time": {"key": "lastModifiedTime", "type": "iso-8601"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "TableLevelOutput"
@@ -7417,12 +8145,13 @@ def __init__(self, **kwargs):
self.last_modified_time = None
-class MigrateMySqlAzureDbForMySqlSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that migrates MySQL databases to Azure Database for MySQL for online migrations.
+class MigrateMySqlAzureDbForMySqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that migrates MySQL databases to Azure Database for MySQL for online
+ migrations.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -7482,8 +8211,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MigrateMySqlAzureDbForMySqlSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -7496,12 +8225,13 @@ def __init__(
self.output = None
-class MigrateOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that migrates Oracle to Azure Database for PostgreSQL for online migrations.
+class MigrateOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that migrates Oracle to Azure Database for PostgreSQL for online
+ migrations.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -7561,8 +8291,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MigrateOracleAzureDbPostgreSqlSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -7575,8 +8305,9 @@ def __init__(
self.output = None
-class MigrateOracleAzureDbPostgreSqlSyncDatabaseInput(_serialization.Model):
- """Database specific information for Oracle to Azure Database for PostgreSQL migration task inputs.
+class MigrateOracleAzureDbPostgreSqlSyncDatabaseInput(_serialization.Model): # pylint: disable=name-too-long
+ """Database specific information for Oracle to Azure Database for PostgreSQL migration task
+ inputs.
:ivar case_manipulation: How to handle object name casing: either Preserve or ToLower.
:vartype case_manipulation: str
@@ -7619,8 +8350,8 @@ def __init__(
migration_setting: Optional[Dict[str, str]] = None,
source_setting: Optional[Dict[str, str]] = None,
target_setting: Optional[Dict[str, str]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword case_manipulation: How to handle object name casing: either Preserve or ToLower.
:paramtype case_manipulation: str
@@ -7651,10 +8382,11 @@ def __init__(
self.target_setting = target_setting
-class MigrateOracleAzureDbPostgreSqlSyncTaskInput(_serialization.Model):
- """Input for the task that migrates Oracle databases to Azure Database for PostgreSQL for online migrations.
+class MigrateOracleAzureDbPostgreSqlSyncTaskInput(_serialization.Model): # pylint: disable=name-too-long
+ """Input for the task that migrates Oracle databases to Azure Database for PostgreSQL for online
+ migrations.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar selected_databases: Databases to migrate. Required.
:vartype selected_databases:
@@ -7684,8 +8416,8 @@ def __init__(
selected_databases: List["_models.MigrateOracleAzureDbPostgreSqlSyncDatabaseInput"],
target_connection_info: "_models.PostgreSqlConnectionInfo",
source_connection_info: "_models.OracleConnectionInfo",
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword selected_databases: Databases to migrate. Required.
:paramtype selected_databases:
@@ -7702,8 +8434,9 @@ def __init__(
self.source_connection_info = source_connection_info
-class MigrateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model):
- """Output for the task that migrates Oracle databases to Azure Database for PostgreSQL for online migrations.
+class MigrateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for the task that migrates Oracle databases to Azure Database for PostgreSQL for online
+ migrations.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError,
@@ -7714,7 +8447,7 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7742,19 +8475,21 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.result_type: Optional[str] = None
-class MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError(MigrateOracleAzureDbPostgreSqlSyncTaskOutput):
+class MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError(
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7783,8 +8518,8 @@ def __init__(
*,
error_message: Optional[str] = None,
events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword error_message: Error message.
:paramtype error_message: str
@@ -7799,12 +8534,12 @@ def __init__(
class MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseLevel(
MigrateOracleAzureDbPostgreSqlSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7887,7 +8622,7 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseLevel(
"latency": {"key": "latency", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -7908,12 +8643,14 @@ def __init__(self, **kwargs):
self.latency = None
-class MigrateOracleAzureDbPostgreSqlSyncTaskOutputError(MigrateOracleAzureDbPostgreSqlSyncTaskOutput):
+class MigrateOracleAzureDbPostgreSqlSyncTaskOutputError(
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigrateOracleAzureDbPostgreSqlSyncTaskOutputError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7935,19 +8672,21 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutputError(MigrateOracleAzureDbPost
"error": {"key": "error", "type": "ReportableException"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "ErrorOutput"
self.error = None
-class MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel(MigrateOracleAzureDbPostgreSqlSyncTaskOutput):
+class MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel(
+ MigrateOracleAzureDbPostgreSqlSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -7989,7 +8728,7 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel(MigrateOracleAz
"target_server": {"key": "targetServer", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "MigrationLevelOutput"
@@ -8003,12 +8742,12 @@ def __init__(self, **kwargs):
class MigrateOracleAzureDbPostgreSqlSyncTaskOutputTableLevel(
MigrateOracleAzureDbPostgreSqlSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateOracleAzureDbPostgreSqlSyncTaskOutputTableLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -8079,7 +8818,7 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutputTableLevel(
"last_modified_time": {"key": "lastModifiedTime", "type": "iso-8601"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "TableLevelOutput"
@@ -8098,8 +8837,9 @@ def __init__(self, **kwargs):
self.last_modified_time = None
-class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInput(_serialization.Model):
- """Database specific information for PostgreSQL to Azure Database for PostgreSQL migration task inputs.
+class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInput(_serialization.Model): # pylint: disable=name-too-long
+ """Database specific information for PostgreSQL to Azure Database for PostgreSQL migration task
+ inputs.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -8147,8 +8887,8 @@ def __init__(
source_setting: Optional[Dict[str, str]] = None,
target_setting: Optional[Dict[str, str]] = None,
selected_tables: Optional[List["_models.MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of the database.
:paramtype name: str
@@ -8175,7 +8915,9 @@ def __init__(
self.selected_tables = selected_tables
-class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput(_serialization.Model):
+class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput(
+ _serialization.Model
+): # pylint: disable=name-too-long
"""Selected tables for the migration.
:ivar name: Name of the table to migrate.
@@ -8186,7 +8928,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput(_serialization
"name": {"key": "name", "type": "str"},
}
- def __init__(self, *, name: Optional[str] = None, **kwargs):
+ def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword name: Name of the table to migrate.
:paramtype name: str
@@ -8195,12 +8937,13 @@ def __init__(self, *, name: Optional[str] = None, **kwargs):
self.name = name
-class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInput(_serialization.Model):
- """Input for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations.
+class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): # pylint: disable=name-too-long
+ """Input for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for
+ online migrations.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar selected_databases: Databases to migrate. Required.
:vartype selected_databases:
@@ -8241,8 +8984,8 @@ def __init__(
target_connection_info: "_models.PostgreSqlConnectionInfo",
source_connection_info: "_models.PostgreSqlConnectionInfo",
encrypted_key_for_secure_fields: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword selected_databases: Databases to migrate. Required.
:paramtype selected_databases:
@@ -8263,8 +9006,9 @@ def __init__(
self.started_on = None
-class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model):
- """Output for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations.
+class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for
+ online migrations.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseError,
@@ -8275,7 +9019,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -8303,7 +9047,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -8312,12 +9056,12 @@ def __init__(self, **kwargs):
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseError(
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput
-):
+): # pylint: disable=name-too-long
"""MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -8346,8 +9090,8 @@ def __init__(
*,
error_message: Optional[str] = None,
events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword error_message: Error message.
:paramtype error_message: str
@@ -8362,12 +9106,12 @@ def __init__(
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevel(
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -8450,7 +9194,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevel(
"latency": {"key": "latency", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -8471,12 +9215,14 @@ def __init__(self, **kwargs):
self.latency = None
-class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError(MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput):
+class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError(
+ MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -8501,7 +9247,9 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError(MigratePostgreSql
"events": {"key": "events", "type": "[SyncMigrationDatabaseErrorEvent]"},
}
- def __init__(self, *, events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None, **kwargs):
+ def __init__(
+ self, *, events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None, **kwargs: Any
+ ) -> None:
"""
:keyword events: List of error events.
:paramtype events: list[~azure.mgmt.datamigration.models.SyncMigrationDatabaseErrorEvent]
@@ -8514,12 +9262,12 @@ def __init__(self, *, events: Optional[List["_models.SyncMigrationDatabaseErrorE
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevel(
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -8579,7 +9327,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevel(
"database_count": {"key": "databaseCount", "type": "float"},
}
- def __init__(self, *, database_count: Optional[float] = None, **kwargs):
+ def __init__(self, *, database_count: Optional[float] = None, **kwargs: Any) -> None:
"""
:keyword database_count: Number of databases to include.
:paramtype database_count: float
@@ -8600,12 +9348,12 @@ def __init__(self, *, database_count: Optional[float] = None, **kwargs):
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevel(
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -8676,7 +9424,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevel(
"last_modified_time": {"key": "lastModifiedTime", "type": "iso-8601"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "TableLevelOutput"
@@ -8695,12 +9443,13 @@ def __init__(self, **kwargs):
self.last_modified_time = None
-class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations.
+class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for
+ online migrations.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -8773,8 +9522,8 @@ def __init__(
task_id: Optional[str] = None,
created_on: Optional[str] = None,
is_cloneable: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -8824,8 +9573,8 @@ def __init__(
id: Optional[str] = None, # pylint: disable=redefined-builtin
target_database_name: Optional[str] = None,
schema_setting: Optional["_models.SchemaMigrationSetting"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of source database.
:paramtype name: str
@@ -8846,7 +9595,7 @@ def __init__(
class SqlMigrationTaskInput(_serialization.Model):
"""Base class for migration task input.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -8869,8 +9618,8 @@ def __init__(
*,
source_connection_info: "_models.SqlConnectionInfo",
target_connection_info: "_models.SqlConnectionInfo",
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -8885,7 +9634,7 @@ def __init__(
class MigrateSchemaSqlServerSqlDbTaskInput(SqlMigrationTaskInput):
"""Input for task that migrates Schema for SQL Server databases to Azure SQL databases.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -8922,8 +9671,8 @@ def __init__(
selected_databases: List["_models.MigrateSchemaSqlServerSqlDbDatabaseInput"],
encrypted_key_for_secure_fields: Optional[str] = None,
started_on: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -8954,7 +9703,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -8981,7 +9730,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -8990,12 +9739,12 @@ def __init__(self, **kwargs):
class MigrateSchemaSqlServerSqlDbTaskOutputDatabaseLevel(
MigrateSchemaSqlServerSqlDbTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateSchemaSqlServerSqlDbTaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9057,7 +9806,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutputDatabaseLevel(
"file_id": {"key": "fileId", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -9073,12 +9822,14 @@ def __init__(self, **kwargs):
self.file_id = None
-class MigrateSchemaSqlServerSqlDbTaskOutputError(MigrateSchemaSqlServerSqlDbTaskOutput):
+class MigrateSchemaSqlServerSqlDbTaskOutputError(
+ MigrateSchemaSqlServerSqlDbTaskOutput
+): # pylint: disable=name-too-long
"""MigrateSchemaSqlServerSqlDbTaskOutputError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9104,7 +9855,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutputError(MigrateSchemaSqlServerSqlDbTask
"error_text": {"key": "errorText", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "SchemaErrorOutput"
@@ -9112,12 +9863,14 @@ def __init__(self, **kwargs):
self.error_text = None
-class MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel(MigrateSchemaSqlServerSqlDbTaskOutput):
+class MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel(
+ MigrateSchemaSqlServerSqlDbTaskOutput
+): # pylint: disable=name-too-long
"""MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9164,7 +9917,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel(MigrateSchemaSqlServer
"target_server_brand_version": {"key": "targetServerBrandVersion", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "MigrationLevelOutput"
@@ -9177,12 +9930,12 @@ def __init__(self, **kwargs):
self.target_server_brand_version = None
-class MigrateSchemaSqlServerSqlDbTaskProperties(ProjectTaskProperties):
+class MigrateSchemaSqlServerSqlDbTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
"""Properties for task that migrates Schema for SQL Server databases to Azure SQL databases.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -9253,8 +10006,8 @@ def __init__(
created_on: Optional[str] = None,
task_id: Optional[str] = None,
is_cloneable: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -9281,7 +10034,7 @@ class MigrateSchemaSqlTaskOutputError(MigrateSchemaSqlServerSqlDbTaskOutput):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9303,7 +10056,7 @@ class MigrateSchemaSqlTaskOutputError(MigrateSchemaSqlServerSqlDbTaskOutput):
"error": {"key": "error", "type": "ReportableException"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "ErrorOutput"
@@ -9337,8 +10090,8 @@ def __init__(
restore_database_name: Optional[str] = None,
backup_and_restore_folder: Optional[str] = None,
database_files: Optional[List["_models.DatabaseFileInput"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of the database.
:paramtype name: str
@@ -9392,8 +10145,8 @@ def __init__(
table_map: Optional[Dict[str, str]] = None,
schema_setting: Optional[JSON] = None,
id: Optional[str] = None, # pylint: disable=redefined-builtin
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of the database.
:paramtype name: str
@@ -9461,8 +10214,8 @@ def __init__(
migration_setting: Optional[Dict[str, str]] = None,
source_setting: Optional[Dict[str, str]] = None,
target_setting: Optional[Dict[str, str]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword id: Unique identifier for database.
:paramtype id: str
@@ -9493,9 +10246,10 @@ def __init__(
class MigrateSqlServerSqlDbSyncTaskInput(SqlMigrationTaskInput):
- """Input for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations.
+ """Input for the task that migrates on-prem SQL Server databases to Azure SQL Database for online
+ migrations.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -9528,8 +10282,8 @@ def __init__(
target_connection_info: "_models.SqlConnectionInfo",
selected_databases: List["_models.MigrateSqlServerSqlDbSyncDatabaseInput"],
validation_options: Optional["_models.MigrationValidationOptions"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -9549,7 +10303,8 @@ def __init__(
class MigrateSqlServerSqlDbSyncTaskOutput(_serialization.Model):
- """Output for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations.
+ """Output for the task that migrates on-prem SQL Server databases to Azure SQL Database for online
+ migrations.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MigrateSqlServerSqlDbSyncTaskOutputDatabaseError,
@@ -9559,7 +10314,7 @@ class MigrateSqlServerSqlDbSyncTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9587,19 +10342,21 @@ class MigrateSqlServerSqlDbSyncTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.result_type: Optional[str] = None
-class MigrateSqlServerSqlDbSyncTaskOutputDatabaseError(MigrateSqlServerSqlDbSyncTaskOutput):
+class MigrateSqlServerSqlDbSyncTaskOutputDatabaseError(
+ MigrateSqlServerSqlDbSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbSyncTaskOutputDatabaseError.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9628,8 +10385,8 @@ def __init__(
*,
error_message: Optional[str] = None,
events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword error_message: Error message.
:paramtype error_message: str
@@ -9644,12 +10401,12 @@ def __init__(
class MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevel(
MigrateSqlServerSqlDbSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9732,7 +10489,7 @@ class MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevel(
"latency": {"key": "latency", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -9758,7 +10515,7 @@ class MigrateSqlServerSqlDbSyncTaskOutputError(MigrateSqlServerSqlDbSyncTaskOutp
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9780,19 +10537,21 @@ class MigrateSqlServerSqlDbSyncTaskOutputError(MigrateSqlServerSqlDbSyncTaskOutp
"error": {"key": "error", "type": "ReportableException"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "ErrorOutput"
self.error = None
-class MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel(MigrateSqlServerSqlDbSyncTaskOutput):
+class MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel(
+ MigrateSqlServerSqlDbSyncTaskOutput
+): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9838,7 +10597,7 @@ class MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel(MigrateSqlServerSqlDbSyn
"database_count": {"key": "databaseCount", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "MigrationLevelOutput"
@@ -9853,12 +10612,12 @@ def __init__(self, **kwargs):
class MigrateSqlServerSqlDbSyncTaskOutputTableLevel(
MigrateSqlServerSqlDbSyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbSyncTaskOutputTableLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -9929,7 +10688,7 @@ class MigrateSqlServerSqlDbSyncTaskOutputTableLevel(
"last_modified_time": {"key": "lastModifiedTime", "type": "iso-8601"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "TableLevelOutput"
@@ -9949,11 +10708,12 @@ def __init__(self, **kwargs):
class MigrateSqlServerSqlDbSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations.
+ """Properties for the task that migrates on-prem SQL Server databases to Azure SQL Database for
+ online migrations.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -10012,8 +10772,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MigrateSqlServerSqlDbSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -10029,7 +10789,7 @@ def __init__(
class MigrateSqlServerSqlDbTaskInput(SqlMigrationTaskInput):
"""Input for the task that migrates on-prem SQL Server databases to Azure SQL Database.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -10078,8 +10838,8 @@ def __init__(
validation_options: Optional["_models.MigrationValidationOptions"] = None,
started_on: Optional[str] = None,
encrypted_key_for_secure_fields: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -10123,7 +10883,7 @@ class MigrateSqlServerSqlDbTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -10152,21 +10912,19 @@ class MigrateSqlServerSqlDbTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.result_type: Optional[str] = None
-class MigrateSqlServerSqlDbTaskOutputDatabaseLevel(
- MigrateSqlServerSqlDbTaskOutput
-): # pylint: disable=too-many-instance-attributes
+class MigrateSqlServerSqlDbTaskOutputDatabaseLevel(MigrateSqlServerSqlDbTaskOutput): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbTaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -10243,7 +11001,7 @@ class MigrateSqlServerSqlDbTaskOutputDatabaseLevel(
"object_summary": {"key": "objectSummary", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -10330,7 +11088,7 @@ class MigrationValidationDatabaseLevelResult(_serialization.Model):
"status": {"key": "status", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -10347,12 +11105,12 @@ def __init__(self, **kwargs):
class MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResult(
MigrateSqlServerSqlDbTaskOutput, MigrationValidationDatabaseLevelResult
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResult.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar migration_id: Migration Identifier.
:vartype migration_id: str
@@ -10420,7 +11178,7 @@ class MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResult(
"result_type": {"key": "resultType", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.migration_id = None
@@ -10441,7 +11199,7 @@ class MigrateSqlServerSqlDbTaskOutputError(MigrateSqlServerSqlDbTaskOutput):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -10463,21 +11221,19 @@ class MigrateSqlServerSqlDbTaskOutputError(MigrateSqlServerSqlDbTaskOutput):
"error": {"key": "error", "type": "ReportableException"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "ErrorOutput"
self.error = None
-class MigrateSqlServerSqlDbTaskOutputMigrationLevel(
- MigrateSqlServerSqlDbTaskOutput
-): # pylint: disable=too-many-instance-attributes
+class MigrateSqlServerSqlDbTaskOutputMigrationLevel(MigrateSqlServerSqlDbTaskOutput): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -10562,8 +11318,8 @@ def __init__(
*,
migration_validation_result: Optional["_models.MigrationValidationResult"] = None,
migration_report_result: Optional["_models.MigrationReportResult"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword migration_validation_result: Migration Validation Results.
:paramtype migration_validation_result:
@@ -10591,14 +11347,12 @@ def __init__(
self.exceptions_and_warnings = None
-class MigrateSqlServerSqlDbTaskOutputTableLevel(
- MigrateSqlServerSqlDbTaskOutput
-): # pylint: disable=too-many-instance-attributes
+class MigrateSqlServerSqlDbTaskOutputTableLevel(MigrateSqlServerSqlDbTaskOutput): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbTaskOutputTableLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -10654,7 +11408,7 @@ class MigrateSqlServerSqlDbTaskOutputTableLevel(
"result_prefix": {"key": "resultPrefix", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "TableLevelOutput"
@@ -10704,8 +11458,8 @@ def __init__(
self,
*,
summary_results: Optional[Dict[str, "_models.MigrationValidationDatabaseSummaryResult"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword summary_results: Validation summary results for each database.
:paramtype summary_results: dict[str,
@@ -10718,12 +11472,14 @@ def __init__(
self.status = None
-class MigrateSqlServerSqlDbTaskOutputValidationResult(MigrateSqlServerSqlDbTaskOutput, MigrationValidationResult):
+class MigrateSqlServerSqlDbTaskOutputValidationResult(
+ MigrateSqlServerSqlDbTaskOutput, MigrationValidationResult
+): # pylint: disable=name-too-long
"""MigrateSqlServerSqlDbTaskOutputValidationResult.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar migration_id: Migration Identifier.
:vartype migration_id: str
@@ -10759,8 +11515,8 @@ def __init__(
self,
*,
summary_results: Optional[Dict[str, "_models.MigrationValidationDatabaseSummaryResult"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword summary_results: Validation summary results for each database.
:paramtype summary_results: dict[str,
@@ -10779,7 +11535,7 @@ class MigrateSqlServerSqlDbTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -10850,8 +11606,8 @@ def __init__(
task_id: Optional[str] = None,
is_cloneable: Optional[bool] = None,
created_on: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -10876,7 +11632,7 @@ def __init__(
class MigrateSqlServerSqlMIDatabaseInput(_serialization.Model):
"""Database specific information for SQL to Azure SQL DB Managed Instance migration task inputs.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar name: Name of the database. Required.
:vartype name: str
@@ -10911,8 +11667,8 @@ def __init__(
backup_file_share: Optional["_models.FileShare"] = None,
backup_file_paths: Optional[List[str]] = None,
id: Optional[str] = None, # pylint: disable=redefined-builtin
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of the database. Required.
:paramtype name: str
@@ -10934,9 +11690,10 @@ def __init__(
class SqlServerSqlMISyncTaskInput(_serialization.Model):
- """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario.
+ """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online
+ scenario.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar selected_databases: Databases to migrate. Required.
:vartype selected_databases:
@@ -10950,8 +11707,9 @@ class SqlServerSqlMISyncTaskInput(_serialization.Model):
:ivar target_connection_info: Connection information for Azure SQL Database Managed Instance.
Required.
:vartype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo
- :ivar azure_app: Azure Active Directory Application the DMS instance will use to connect to the
- target instance of Azure SQL Database Managed Instance and the Azure Storage Account. Required.
+ :ivar azure_app: Azure Active Directory Application the DMS (classic) instance will use to
+ connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage
+ Account. Required.
:vartype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp
"""
@@ -10981,8 +11739,8 @@ def __init__(
target_connection_info: "_models.MiSqlConnectionInfo",
azure_app: "_models.AzureActiveDirectoryApp",
backup_file_share: Optional["_models.FileShare"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword selected_databases: Databases to migrate. Required.
:paramtype selected_databases:
@@ -10996,9 +11754,9 @@ def __init__(
:keyword target_connection_info: Connection information for Azure SQL Database Managed
Instance. Required.
:paramtype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo
- :keyword azure_app: Azure Active Directory Application the DMS instance will use to connect to
- the target instance of Azure SQL Database Managed Instance and the Azure Storage Account.
- Required.
+ :keyword azure_app: Azure Active Directory Application the DMS (classic) instance will use to
+ connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage
+ Account. Required.
:paramtype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp
"""
super().__init__(**kwargs)
@@ -11011,9 +11769,10 @@ def __init__(
class MigrateSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskInput):
- """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario.
+ """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online
+ scenario.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar selected_databases: Databases to migrate. Required.
:vartype selected_databases:
@@ -11027,8 +11786,9 @@ class MigrateSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskInput):
:ivar target_connection_info: Connection information for Azure SQL Database Managed Instance.
Required.
:vartype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo
- :ivar azure_app: Azure Active Directory Application the DMS instance will use to connect to the
- target instance of Azure SQL Database Managed Instance and the Azure Storage Account. Required.
+ :ivar azure_app: Azure Active Directory Application the DMS (classic) instance will use to
+ connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage
+ Account. Required.
:vartype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp
:ivar number_of_parallel_database_migrations: Number of database migrations to start in
parallel.
@@ -11063,8 +11823,8 @@ def __init__(
azure_app: "_models.AzureActiveDirectoryApp",
backup_file_share: Optional["_models.FileShare"] = None,
number_of_parallel_database_migrations: Optional[float] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword selected_databases: Databases to migrate. Required.
:paramtype selected_databases:
@@ -11078,9 +11838,9 @@ def __init__(
:keyword target_connection_info: Connection information for Azure SQL Database Managed
Instance. Required.
:paramtype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo
- :keyword azure_app: Azure Active Directory Application the DMS instance will use to connect to
- the target instance of Azure SQL Database Managed Instance and the Azure Storage Account.
- Required.
+ :keyword azure_app: Azure Active Directory Application the DMS (classic) instance will use to
+ connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage
+ Account. Required.
:paramtype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp
:keyword number_of_parallel_database_migrations: Number of database migrations to start in
parallel.
@@ -11099,7 +11859,8 @@ def __init__(
class MigrateSqlServerSqlMISyncTaskOutput(_serialization.Model):
- """Output for task that migrates SQL Server databases to Azure SQL Database Managed Instance using Log Replay Service.
+ """Output for task that migrates SQL Server databases to Azure SQL Database Managed Instance using
+ Log Replay Service.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel, MigrateSqlServerSqlMISyncTaskOutputError,
@@ -11107,7 +11868,7 @@ class MigrateSqlServerSqlMISyncTaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11133,7 +11894,7 @@ class MigrateSqlServerSqlMISyncTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -11142,12 +11903,12 @@ def __init__(self, **kwargs):
class MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel(
MigrateSqlServerSqlMISyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11214,7 +11975,7 @@ class MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel(
"exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -11236,7 +11997,7 @@ class MigrateSqlServerSqlMISyncTaskOutputError(MigrateSqlServerSqlMISyncTaskOutp
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11258,7 +12019,7 @@ class MigrateSqlServerSqlMISyncTaskOutputError(MigrateSqlServerSqlMISyncTaskOutp
"error": {"key": "error", "type": "ReportableException"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "ErrorOutput"
@@ -11267,12 +12028,12 @@ def __init__(self, **kwargs):
class MigrateSqlServerSqlMISyncTaskOutputMigrationLevel(
MigrateSqlServerSqlMISyncTaskOutput
-): # pylint: disable=too-many-instance-attributes
+): # pylint: disable=name-too-long
"""MigrateSqlServerSqlMISyncTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11335,7 +12096,7 @@ class MigrateSqlServerSqlMISyncTaskOutputMigrationLevel(
"database_error_count": {"key": "databaseErrorCount", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "MigrationLevelOutput"
@@ -11353,11 +12114,12 @@ def __init__(self, **kwargs):
class MigrateSqlServerSqlMISyncTaskProperties(ProjectTaskProperties):
- """Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance sync scenario.
+ """Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance
+ sync scenario.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -11420,8 +12182,8 @@ def __init__(
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MigrateSqlServerSqlMISyncTaskInput"] = None,
created_on: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -11437,10 +12199,10 @@ def __init__(
self.created_on = created_on
-class MigrateSqlServerSqlMITaskInput(SqlMigrationTaskInput): # pylint: disable=too-many-instance-attributes
+class MigrateSqlServerSqlMITaskInput(SqlMigrationTaskInput):
"""Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -11507,8 +12269,8 @@ def __init__(
backup_mode: Optional[Union[str, "_models.BackupMode"]] = None,
aad_domain_name: Optional[str] = None,
encrypted_key_for_secure_fields: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -11563,7 +12325,7 @@ class MigrateSqlServerSqlMITaskOutput(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11591,19 +12353,19 @@ class MigrateSqlServerSqlMITaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.result_type: Optional[str] = None
-class MigrateSqlServerSqlMITaskOutputAgentJobLevel(MigrateSqlServerSqlMITaskOutput):
+class MigrateSqlServerSqlMITaskOutputAgentJobLevel(MigrateSqlServerSqlMITaskOutput): # pylint: disable=name-too-long
"""MigrateSqlServerSqlMITaskOutputAgentJobLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11650,7 +12412,7 @@ class MigrateSqlServerSqlMITaskOutputAgentJobLevel(MigrateSqlServerSqlMITaskOutp
"exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "AgentJobLevelOutput"
@@ -11663,12 +12425,12 @@ def __init__(self, **kwargs):
self.exceptions_and_warnings = None
-class MigrateSqlServerSqlMITaskOutputDatabaseLevel(MigrateSqlServerSqlMITaskOutput):
+class MigrateSqlServerSqlMITaskOutputDatabaseLevel(MigrateSqlServerSqlMITaskOutput): # pylint: disable=name-too-long
"""MigrateSqlServerSqlMITaskOutputDatabaseLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11720,7 +12482,7 @@ class MigrateSqlServerSqlMITaskOutputDatabaseLevel(MigrateSqlServerSqlMITaskOutp
"exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "DatabaseLevelOutput"
@@ -11739,7 +12501,7 @@ class MigrateSqlServerSqlMITaskOutputError(MigrateSqlServerSqlMITaskOutput):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11761,19 +12523,19 @@ class MigrateSqlServerSqlMITaskOutputError(MigrateSqlServerSqlMITaskOutput):
"error": {"key": "error", "type": "ReportableException"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "ErrorOutput"
self.error = None
-class MigrateSqlServerSqlMITaskOutputLoginLevel(MigrateSqlServerSqlMITaskOutput):
+class MigrateSqlServerSqlMITaskOutputLoginLevel(MigrateSqlServerSqlMITaskOutput): # pylint: disable=name-too-long
"""MigrateSqlServerSqlMITaskOutputLoginLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11822,7 +12584,7 @@ class MigrateSqlServerSqlMITaskOutputLoginLevel(MigrateSqlServerSqlMITaskOutput)
"exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "LoginLevelOutput"
@@ -11835,14 +12597,12 @@ def __init__(self, **kwargs):
self.exceptions_and_warnings = None
-class MigrateSqlServerSqlMITaskOutputMigrationLevel(
- MigrateSqlServerSqlMITaskOutput
-): # pylint: disable=too-many-instance-attributes
+class MigrateSqlServerSqlMITaskOutputMigrationLevel(MigrateSqlServerSqlMITaskOutput): # pylint: disable=name-too-long
"""MigrateSqlServerSqlMITaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -11923,7 +12683,7 @@ class MigrateSqlServerSqlMITaskOutputMigrationLevel(
"exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "MigrationLevelOutput"
@@ -11944,12 +12704,12 @@ def __init__(self, **kwargs):
self.exceptions_and_warnings = None
-class MigrateSqlServerSqlMITaskProperties(ProjectTaskProperties): # pylint: disable=too-many-instance-attributes
+class MigrateSqlServerSqlMITaskProperties(ProjectTaskProperties):
"""Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -12024,8 +12784,8 @@ def __init__(
created_on: Optional[str] = None,
parent_task_id: Optional[str] = None,
is_cloneable: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -12051,9 +12811,10 @@ def __init__(
class MigrateSsisTaskInput(SqlMigrationTaskInput):
- """Input for task that migrates SSIS packages from SQL Server to Azure SQL Database Managed Instance.
+ """Input for task that migrates SSIS packages from SQL Server to Azure SQL Database Managed
+ Instance.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -12081,8 +12842,8 @@ def __init__(
source_connection_info: "_models.SqlConnectionInfo",
target_connection_info: "_models.SqlConnectionInfo",
ssis_migration_info: "_models.SsisMigrationInfo",
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -12098,14 +12859,15 @@ def __init__(
class MigrateSsisTaskOutput(_serialization.Model):
- """Output for task that migrates SSIS packages from SQL Server to Azure SQL Database Managed Instance.
+ """Output for task that migrates SSIS packages from SQL Server to Azure SQL Database Managed
+ Instance.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MigrateSsisTaskOutputMigrationLevel, MigrateSsisTaskOutputProjectLevel
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -12130,19 +12892,19 @@ class MigrateSsisTaskOutput(_serialization.Model):
}
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.result_type: Optional[str] = None
-class MigrateSsisTaskOutputMigrationLevel(MigrateSsisTaskOutput): # pylint: disable=too-many-instance-attributes
+class MigrateSsisTaskOutputMigrationLevel(MigrateSsisTaskOutput):
"""MigrateSsisTaskOutputMigrationLevel.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -12203,7 +12965,7 @@ class MigrateSsisTaskOutputMigrationLevel(MigrateSsisTaskOutput): # pylint: dis
"stage": {"key": "stage", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "MigrationLevelOutput"
@@ -12224,7 +12986,7 @@ class MigrateSsisTaskOutputProjectLevel(MigrateSsisTaskOutput):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Result identifier.
:vartype id: str
@@ -12276,7 +13038,7 @@ class MigrateSsisTaskOutputProjectLevel(MigrateSsisTaskOutput):
"exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_type: str = "SsisProjectLevelOutput"
@@ -12291,11 +13053,12 @@ def __init__(self, **kwargs):
class MigrateSsisTaskProperties(ProjectTaskProperties):
- """Properties for task that migrates SSIS packages from SQL Server databases to Azure SQL Database Managed Instance.
+ """Properties for task that migrates SSIS packages from SQL Server databases to Azure SQL Database
+ Managed Instance.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -12354,8 +13117,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MigrateSsisTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -12371,7 +13134,7 @@ def __init__(
class MigrateSyncCompleteCommandInput(_serialization.Model):
"""Input for command that completes sync migration for a database.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar database_name: Name of database. Required.
:vartype database_name: str
@@ -12388,7 +13151,9 @@ class MigrateSyncCompleteCommandInput(_serialization.Model):
"commit_time_stamp": {"key": "commitTimeStamp", "type": "iso-8601"},
}
- def __init__(self, *, database_name: str, commit_time_stamp: Optional[datetime.datetime] = None, **kwargs):
+ def __init__(
+ self, *, database_name: str, commit_time_stamp: Optional[datetime.datetime] = None, **kwargs: Any
+ ) -> None:
"""
:keyword database_name: Name of database. Required.
:paramtype database_name: str
@@ -12421,7 +13186,7 @@ class MigrateSyncCompleteCommandOutput(_serialization.Model):
"errors": {"key": "errors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -12433,7 +13198,7 @@ class MigrateSyncCompleteCommandProperties(CommandProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database",
"Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart".
@@ -12472,8 +13237,8 @@ def __init__(
*,
input: Optional["_models.MigrateSyncCompleteCommandInput"] = None,
command_id: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword input: Command input.
:paramtype input: ~azure.mgmt.datamigration.models.MigrateSyncCompleteCommandInput
@@ -12508,66 +13273,230 @@ class MigrationEligibilityInfo(_serialization.Model):
"validation_messages": {"key": "validationMessages", "type": "[str]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.is_eligible_for_migration = None
self.validation_messages = None
-class MigrationOperationInput(_serialization.Model):
- """Migration Operation Input.
+class MigrationOperationInput(_serialization.Model):
+ """Migration Operation Input.
+
+ :ivar migration_operation_id: ID tracking migration operation.
+ :vartype migration_operation_id: str
+ """
+
+ _attribute_map = {
+ "migration_operation_id": {"key": "migrationOperationId", "type": "str"},
+ }
+
+ def __init__(self, *, migration_operation_id: Optional[str] = None, **kwargs: Any) -> None:
+ """
+ :keyword migration_operation_id: ID tracking migration operation.
+ :paramtype migration_operation_id: str
+ """
+ super().__init__(**kwargs)
+ self.migration_operation_id = migration_operation_id
+
+
+class MigrationReportResult(_serialization.Model):
+ """Migration validation report result, contains the url for downloading the generated report.
+
+ :ivar id: Migration validation result identifier.
+ :vartype id: str
+ :ivar report_url: The url of the report.
+ :vartype report_url: str
+ """
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "str"},
+ "report_url": {"key": "reportUrl", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None, # pylint: disable=redefined-builtin
+ report_url: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword id: Migration validation result identifier.
+ :paramtype id: str
+ :keyword report_url: The url of the report.
+ :paramtype report_url: str
+ """
+ super().__init__(**kwargs)
+ self.id = id
+ self.report_url = report_url
+
+
+class TrackedResource(Resource):
+ """The resource model definition for an Azure Resource Manager tracked top level resource which
+ has 'tags' and a 'location'.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+   "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}".
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :ivar tags: Resource tags.
+ :vartype tags: dict[str, str]
+ :ivar location: The geo-location where the resource lives. Required.
+ :vartype location: str
+ """
+
+ _validation = {
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
+ "location": {"required": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
+ "tags": {"key": "tags", "type": "{str}"},
+ "location": {"key": "location", "type": "str"},
+ }
+
+ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
+ """
+ :keyword tags: Resource tags.
+ :paramtype tags: dict[str, str]
+ :keyword location: The geo-location where the resource lives. Required.
+ :paramtype location: str
+ """
+ super().__init__(**kwargs)
+ self.tags = tags
+ self.location = location
+
+
+class MigrationService(TrackedResource):
+ """A Migration Service.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+   "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}".
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :ivar tags: Resource tags.
+ :vartype tags: dict[str, str]
+ :ivar location: The geo-location where the resource lives. Required.
+ :vartype location: str
+ :ivar provisioning_state: Provisioning state to track the async operation status. Known values
+ are: "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled".
+ :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState
+ :ivar integration_runtime_state: Current state of the Integration runtime.
+ :vartype integration_runtime_state: str
+ """
+
+ _validation = {
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
+ "location": {"required": True},
+ "provisioning_state": {"readonly": True},
+ "integration_runtime_state": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
+ "tags": {"key": "tags", "type": "{str}"},
+ "location": {"key": "location", "type": "str"},
+ "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
+ "integration_runtime_state": {"key": "properties.integrationRuntimeState", "type": "str"},
+ }
+
+ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
+ """
+ :keyword tags: Resource tags.
+ :paramtype tags: dict[str, str]
+ :keyword location: The geo-location where the resource lives. Required.
+ :paramtype location: str
+ """
+ super().__init__(tags=tags, location=location, **kwargs)
+ self.provisioning_state = None
+ self.integration_runtime_state = None
+
+
+class MigrationServiceListResult(_serialization.Model):
+ """A list of Migration Service.
- :ivar migration_operation_id: ID tracking migration operation.
- :vartype migration_operation_id: str
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value:
+ :vartype value: list[~azure.mgmt.datamigration.models.MigrationService]
+ :ivar next_link:
+ :vartype next_link: str
"""
+ _validation = {
+ "value": {"readonly": True},
+ "next_link": {"readonly": True},
+ }
+
_attribute_map = {
- "migration_operation_id": {"key": "migrationOperationId", "type": "str"},
+ "value": {"key": "value", "type": "[MigrationService]"},
+ "next_link": {"key": "nextLink", "type": "str"},
}
- def __init__(self, *, migration_operation_id: Optional[str] = None, **kwargs):
- """
- :keyword migration_operation_id: ID tracking migration operation.
- :paramtype migration_operation_id: str
- """
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
super().__init__(**kwargs)
- self.migration_operation_id = migration_operation_id
+ self.value = None
+ self.next_link = None
-class MigrationReportResult(_serialization.Model):
- """Migration validation report result, contains the url for downloading the generated report.
+class MigrationServiceUpdate(_serialization.Model):
+ """An update to a Migration Service.
- :ivar id: Migration validation result identifier.
- :vartype id: str
- :ivar report_url: The url of the report.
- :vartype report_url: str
+ :ivar tags: Dictionary of :code:`<string>`.
+ :vartype tags: dict[str, str]
"""
_attribute_map = {
- "id": {"key": "id", "type": "str"},
- "report_url": {"key": "reportUrl", "type": "str"},
+ "tags": {"key": "tags", "type": "{str}"},
}
- def __init__(
- self,
- *,
- id: Optional[str] = None, # pylint: disable=redefined-builtin
- report_url: Optional[str] = None,
- **kwargs
- ):
+ def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
- :keyword id: Migration validation result identifier.
- :paramtype id: str
- :keyword report_url: The url of the report.
- :paramtype report_url: str
+ :keyword tags: Dictionary of :code:`<string>`.
+ :paramtype tags: dict[str, str]
"""
super().__init__(**kwargs)
- self.id = id
- self.report_url = report_url
+ self.tags = tags
-class MigrationStatusDetails(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class MigrationStatusDetails(_serialization.Model):
"""Detailed status of current migration.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -12633,7 +13562,7 @@ class MigrationStatusDetails(_serialization.Model): # pylint: disable=too-many-
"pending_log_backups_count": {"key": "pendingLogBackupsCount", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.migration_state = None
@@ -12672,7 +13601,7 @@ class MigrationTableMetadata(_serialization.Model):
"target_table_name": {"key": "targetTableName", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.source_table_name = None
@@ -12722,7 +13651,7 @@ class MigrationValidationDatabaseSummaryResult(_serialization.Model):
"status": {"key": "status", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -12762,8 +13691,8 @@ def __init__(
enable_schema_validation: Optional[bool] = None,
enable_data_integrity_validation: Optional[bool] = None,
enable_query_analysis_validation: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword enable_schema_validation: Allows to compare the schema information between source and
target.
@@ -12786,7 +13715,7 @@ def __init__(
class MiSqlConnectionInfo(ConnectionInfo):
"""Properties required to create a connection to Azure SQL database Managed instance.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: Type of connection info. Required.
:vartype type: str
@@ -12817,8 +13746,8 @@ def __init__(
managed_instance_resource_id: str,
user_name: Optional[str] = None,
password: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword user_name: User name.
:paramtype user_name: str
@@ -12833,12 +13762,72 @@ def __init__(
self.managed_instance_resource_id = managed_instance_resource_id
+class MongoConnectionInformation(_serialization.Model):
+ """Mongo Connection.
+
+ :ivar host: Host of mongo connection.
+ :vartype host: str
+ :ivar port: Port of mongo connection.
+ :vartype port: int
+ :ivar user_name: User name to connect to Mongo.
+ :vartype user_name: str
+ :ivar password: Password to connect to Mongo.
+ :vartype password: str
+ :ivar use_ssl: Whether to UseSsl or UseTls to connect to Mongo. Default is true.
+ :vartype use_ssl: bool
+ :ivar connection_string: ConnectionString to connect to Mongo.
+ :vartype connection_string: str
+ """
+
+ _attribute_map = {
+ "host": {"key": "host", "type": "str"},
+ "port": {"key": "port", "type": "int"},
+ "user_name": {"key": "userName", "type": "str"},
+ "password": {"key": "password", "type": "str"},
+ "use_ssl": {"key": "useSsl", "type": "bool"},
+ "connection_string": {"key": "connectionString", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ host: Optional[str] = None,
+ port: Optional[int] = None,
+ user_name: Optional[str] = None,
+ password: Optional[str] = None,
+ use_ssl: Optional[bool] = None,
+ connection_string: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword host: Host of mongo connection.
+ :paramtype host: str
+ :keyword port: Port of mongo connection.
+ :paramtype port: int
+ :keyword user_name: User name to connect to Mongo.
+ :paramtype user_name: str
+ :keyword password: Password to connect to Mongo.
+ :paramtype password: str
+ :keyword use_ssl: Whether to UseSsl or UseTls to connect to Mongo. Default is true.
+ :paramtype use_ssl: bool
+ :keyword connection_string: ConnectionString to connect to Mongo.
+ :paramtype connection_string: str
+ """
+ super().__init__(**kwargs)
+ self.host = host
+ self.port = port
+ self.user_name = user_name
+ self.password = password
+ self.use_ssl = use_ssl
+ self.connection_string = connection_string
+
+
class MongoDbCancelCommand(CommandProperties):
"""Properties for the command that cancels a migration in whole or in part.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database",
"Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart".
@@ -12865,7 +13854,7 @@ class MongoDbCancelCommand(CommandProperties):
"input": {"key": "input", "type": "MongoDbCommandInput"},
}
- def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **kwargs):
+ def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **kwargs: Any) -> None:
"""
:keyword input: Command input.
:paramtype input: ~azure.mgmt.datamigration.models.MongoDbCommandInput
@@ -12878,7 +13867,7 @@ def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **k
class MongoDbClusterInfo(_serialization.Model):
"""Describes a MongoDB data source.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar databases: A list of non-system databases in the cluster. Required.
:vartype databases: list[~azure.mgmt.datamigration.models.MongoDbDatabaseInfo]
@@ -12913,8 +13902,8 @@ def __init__(
supports_sharding: bool,
type: Union[str, "_models.MongoDbClusterType"],
version: str,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword databases: A list of non-system databases in the cluster. Required.
:paramtype databases: list[~azure.mgmt.datamigration.models.MongoDbDatabaseInfo]
@@ -12937,7 +13926,7 @@ def __init__(
class MongoDbObjectInfo(_serialization.Model):
"""Describes a database or collection within a MongoDB data source.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar average_document_size: The average document size, or -1 if the average size is unknown.
Required.
@@ -12979,8 +13968,8 @@ def __init__(
document_count: int,
name: str,
qualified_name: str,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword average_document_size: The average document size, or -1 if the average size is
unknown. Required.
@@ -13005,10 +13994,10 @@ def __init__(
self.qualified_name = qualified_name
-class MongoDbCollectionInfo(MongoDbObjectInfo): # pylint: disable=too-many-instance-attributes
+class MongoDbCollectionInfo(MongoDbObjectInfo):
"""Describes a supported collection within a MongoDB database.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar average_document_size: The average document size, or -1 if the average size is unknown.
Required.
@@ -13085,8 +14074,8 @@ def __init__(
supports_sharding: bool,
shard_key: Optional["_models.MongoDbShardKeyInfo"] = None,
view_of: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword average_document_size: The average document size, or -1 if the average size is
unknown. Required.
@@ -13136,13 +14125,13 @@ def __init__(
self.view_of = view_of
-class MongoDbProgress(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class MongoDbProgress(_serialization.Model):
"""Base class for MongoDB migration outputs.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
MongoDbCollectionProgress, MongoDbDatabaseProgress, MongoDbMigrationProgress
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar bytes_copied: The number of document bytes copied during the Copying stage. Required.
:vartype bytes_copied: int
@@ -13241,8 +14230,8 @@ def __init__(
last_replay_time: Optional[datetime.datetime] = None,
name: Optional[str] = None,
qualified_name: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword bytes_copied: The number of document bytes copied during the Copying stage. Required.
:paramtype bytes_copied: int
@@ -13300,10 +14289,10 @@ def __init__(
self.total_documents = total_documents
-class MongoDbCollectionProgress(MongoDbProgress): # pylint: disable=too-many-instance-attributes
+class MongoDbCollectionProgress(MongoDbProgress):
"""Describes the progress of a collection.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar bytes_copied: The number of document bytes copied during the Copying stage. Required.
:vartype bytes_copied: int
@@ -13394,8 +14383,8 @@ def __init__(
last_replay_time: Optional[datetime.datetime] = None,
name: Optional[str] = None,
qualified_name: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword bytes_copied: The number of document bytes copied during the Copying stage. Required.
:paramtype bytes_copied: int
@@ -13480,8 +14469,8 @@ def __init__(
can_delete: Optional[bool] = None,
shard_key: Optional["_models.MongoDbShardKeySetting"] = None,
target_r_us: Optional[int] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword can_delete: Whether the migrator is allowed to drop the target collection in the
course of performing a migration. The default is true.
@@ -13510,7 +14499,7 @@ class MongoDbCommandInput(_serialization.Model):
"object_name": {"key": "objectName", "type": "str"},
}
- def __init__(self, *, object_name: Optional[str] = None, **kwargs):
+ def __init__(self, *, object_name: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword object_name: The qualified name of a database or collection to act upon, or null to
act upon the entire migration.
@@ -13520,10 +14509,10 @@ def __init__(self, *, object_name: Optional[str] = None, **kwargs):
self.object_name = object_name
-class MongoDbConnectionInfo(ConnectionInfo): # pylint: disable=too-many-instance-attributes
+class MongoDbConnectionInfo(ConnectionInfo):
"""Describes a connection to a MongoDB data source.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: Type of connection info. Required.
:vartype type: str
@@ -13596,8 +14585,8 @@ def __init__(
port: Optional[int] = None,
additional_settings: Optional[str] = None,
authentication: Optional[Union[str, "_models.AuthenticationType"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword user_name: User name.
:paramtype user_name: str
@@ -13647,7 +14636,7 @@ def __init__(
class MongoDbDatabaseInfo(MongoDbObjectInfo):
"""Describes a database within a MongoDB data source.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar average_document_size: The average document size, or -1 if the average size is unknown.
Required.
@@ -13700,8 +14689,8 @@ def __init__(
qualified_name: str,
collections: List["_models.MongoDbCollectionInfo"],
supports_sharding: bool,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword average_document_size: The average document size, or -1 if the average size is
unknown. Required.
@@ -13735,10 +14724,10 @@ def __init__(
self.supports_sharding = supports_sharding
-class MongoDbDatabaseProgress(MongoDbProgress): # pylint: disable=too-many-instance-attributes
+class MongoDbDatabaseProgress(MongoDbProgress):
"""Describes the progress of a database.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar bytes_copied: The number of document bytes copied during the Copying stage. Required.
:vartype bytes_copied: int
@@ -13834,8 +14823,8 @@ def __init__(
name: Optional[str] = None,
qualified_name: Optional[str] = None,
collections: Optional[Dict[str, "_models.MongoDbCollectionProgress"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword bytes_copied: The number of document bytes copied during the Copying stage. Required.
:paramtype bytes_copied: int
@@ -13902,7 +14891,7 @@ def __init__(
class MongoDbDatabaseSettings(_serialization.Model):
"""Describes how an individual MongoDB database should be migrated.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar collections: The collections on the source database to migrate to the target. The keys
are the unqualified names of the collections. Required.
@@ -13927,8 +14916,8 @@ def __init__(
*,
collections: Dict[str, "_models.MongoDbCollectionSettings"],
target_r_us: Optional[int] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword collections: The collections on the source database to migrate to the target. The keys
are the unqualified names of the collections. Required.
@@ -13971,8 +14960,8 @@ def __init__(
count: Optional[int] = None,
message: Optional[str] = None,
type: Optional[Union[str, "_models.MongoDbErrorType"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword code: The non-localized, machine-readable code that describes the error or warning.
:paramtype code: str
@@ -13996,7 +14985,7 @@ class MongoDbFinishCommand(CommandProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database",
"Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart".
@@ -14023,7 +15012,7 @@ class MongoDbFinishCommand(CommandProperties):
"input": {"key": "input", "type": "MongoDbFinishCommandInput"},
}
- def __init__(self, *, input: Optional["_models.MongoDbFinishCommandInput"] = None, **kwargs):
+ def __init__(self, *, input: Optional["_models.MongoDbFinishCommandInput"] = None, **kwargs: Any) -> None:
"""
:keyword input: Command input.
:paramtype input: ~azure.mgmt.datamigration.models.MongoDbFinishCommandInput
@@ -14036,7 +15025,7 @@ def __init__(self, *, input: Optional["_models.MongoDbFinishCommandInput"] = Non
class MongoDbFinishCommandInput(MongoDbCommandInput):
"""Describes the input to the 'finish' MongoDB migration command.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar object_name: The qualified name of a database or collection to act upon, or null to act
upon the entire migration.
@@ -14056,7 +15045,7 @@ class MongoDbFinishCommandInput(MongoDbCommandInput):
"immediate": {"key": "immediate", "type": "bool"},
}
- def __init__(self, *, immediate: bool, object_name: Optional[str] = None, **kwargs):
+ def __init__(self, *, immediate: bool, object_name: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword object_name: The qualified name of a database or collection to act upon, or null to
act upon the entire migration.
@@ -14070,10 +15059,10 @@ def __init__(self, *, immediate: bool, object_name: Optional[str] = None, **kwar
self.immediate = immediate
-class MongoDbMigrationProgress(MongoDbProgress): # pylint: disable=too-many-instance-attributes
+class MongoDbMigrationProgress(MongoDbProgress):
"""Describes the progress of the overall migration.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar bytes_copied: The number of document bytes copied during the Copying stage. Required.
:vartype bytes_copied: int
@@ -14169,8 +15158,8 @@ def __init__(
name: Optional[str] = None,
qualified_name: Optional[str] = None,
databases: Optional[Dict[str, "_models.MongoDbDatabaseProgress"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword bytes_copied: The number of document bytes copied during the Copying stage. Required.
:paramtype bytes_copied: int
@@ -14237,7 +15226,7 @@ def __init__(
class MongoDbMigrationSettings(_serialization.Model):
"""Describes how a MongoDB data migration should be performed.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar boost_r_us: The RU limit on a CosmosDB target that collections will be temporarily
increased to (if lower) during the initial copy of a migration, from 10,000 to 1,000,000, or 0
@@ -14282,8 +15271,8 @@ def __init__(
boost_r_us: Optional[int] = None,
replication: Optional[Union[str, "_models.MongoDbReplication"]] = None,
throttling: Optional["_models.MongoDbThrottlingSettings"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword boost_r_us: The RU limit on a CosmosDB target that collections will be temporarily
increased to (if lower) during the initial copy of a migration, from 10,000 to 1,000,000, or 0
@@ -14317,7 +15306,7 @@ class MongoDbRestartCommand(CommandProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database",
"Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart".
@@ -14344,7 +15333,7 @@ class MongoDbRestartCommand(CommandProperties):
"input": {"key": "input", "type": "MongoDbCommandInput"},
}
- def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **kwargs):
+ def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **kwargs: Any) -> None:
"""
:keyword input: Command input.
:paramtype input: ~azure.mgmt.datamigration.models.MongoDbCommandInput
@@ -14357,7 +15346,7 @@ def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **k
class MongoDbShardKeyField(_serialization.Model):
"""Describes a field reference within a MongoDB shard key.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar name: The name of the field. Required.
:vartype name: str
@@ -14376,7 +15365,7 @@ class MongoDbShardKeyField(_serialization.Model):
"order": {"key": "order", "type": "str"},
}
- def __init__(self, *, name: str, order: Union[str, "_models.MongoDbShardKeyOrder"], **kwargs):
+ def __init__(self, *, name: str, order: Union[str, "_models.MongoDbShardKeyOrder"], **kwargs: Any) -> None:
"""
:keyword name: The name of the field. Required.
:paramtype name: str
@@ -14392,7 +15381,7 @@ def __init__(self, *, name: str, order: Union[str, "_models.MongoDbShardKeyOrder
class MongoDbShardKeyInfo(_serialization.Model):
"""Describes a MongoDB shard key.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar fields: The fields within the shard key. Required.
:vartype fields: list[~azure.mgmt.datamigration.models.MongoDbShardKeyField]
@@ -14410,7 +15399,7 @@ class MongoDbShardKeyInfo(_serialization.Model):
"is_unique": {"key": "isUnique", "type": "bool"},
}
- def __init__(self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: bool, **kwargs):
+ def __init__(self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: bool, **kwargs: Any) -> None:
"""
:keyword fields: The fields within the shard key. Required.
:paramtype fields: list[~azure.mgmt.datamigration.models.MongoDbShardKeyField]
@@ -14425,7 +15414,7 @@ def __init__(self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: b
class MongoDbShardKeySetting(_serialization.Model):
"""Describes a MongoDB shard key.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar fields: The fields within the shard key. Required.
:vartype fields: list[~azure.mgmt.datamigration.models.MongoDbShardKeyField]
@@ -14442,7 +15431,9 @@ class MongoDbShardKeySetting(_serialization.Model):
"is_unique": {"key": "isUnique", "type": "bool"},
}
- def __init__(self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: Optional[bool] = None, **kwargs):
+ def __init__(
+ self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: Optional[bool] = None, **kwargs: Any
+ ) -> None:
"""
:keyword fields: The fields within the shard key. Required.
:paramtype fields: list[~azure.mgmt.datamigration.models.MongoDbShardKeyField]
@@ -14480,8 +15471,8 @@ def __init__(
min_free_cpu: Optional[int] = None,
min_free_memory_mb: Optional[int] = None,
max_parallelism: Optional[int] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword min_free_cpu: The percentage of CPU time that the migrator will try to avoid using,
from 0 to 100.
@@ -14499,10 +15490,111 @@ def __init__(
self.max_parallelism = max_parallelism
+class MongoMigrationCollection(_serialization.Model):
+ """Mongo source and target database and collection details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar source_database: Source database name.
+ :vartype source_database: str
+ :ivar source_collection: Source collection name.
+ :vartype source_collection: str
+ :ivar target_database: Target database name.
+ :vartype target_database: str
+ :ivar target_collection: Target collection name.
+ :vartype target_collection: str
+ :ivar migration_progress_details: Detailed migration status. Not included by default.
+ :vartype migration_progress_details:
+ ~azure.mgmt.datamigration.models.MongoMigrationProgressDetails
+ """
+
+ _validation = {
+ "migration_progress_details": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "source_database": {"key": "sourceDatabase", "type": "str"},
+ "source_collection": {"key": "sourceCollection", "type": "str"},
+ "target_database": {"key": "targetDatabase", "type": "str"},
+ "target_collection": {"key": "targetCollection", "type": "str"},
+ "migration_progress_details": {"key": "migrationProgressDetails", "type": "MongoMigrationProgressDetails"},
+ }
+
+ def __init__(
+ self,
+ *,
+ source_database: Optional[str] = None,
+ source_collection: Optional[str] = None,
+ target_database: Optional[str] = None,
+ target_collection: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword source_database: Source database name.
+ :paramtype source_database: str
+ :keyword source_collection: Source collection name.
+ :paramtype source_collection: str
+ :keyword target_database: Target database name.
+ :paramtype target_database: str
+ :keyword target_collection: Target collection name.
+ :paramtype target_collection: str
+ """
+ super().__init__(**kwargs)
+ self.source_database = source_database
+ self.source_collection = source_collection
+ self.target_database = target_database
+ self.target_collection = target_collection
+ self.migration_progress_details = None
+
+
+class MongoMigrationProgressDetails(_serialization.Model):
+ """Detailed status of collection migration.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar migration_status: Migration Status. Known values are: "NotStarted", "InProgress",
+ "Completed", "Failed", and "Canceled".
+ :vartype migration_status: str or ~azure.mgmt.datamigration.models.MongoMigrationStatus
+ :ivar migration_error: Migration Error.
+ :vartype migration_error: str
+ :ivar source_document_count: Source Document Count.
+ :vartype source_document_count: int
+ :ivar processed_document_count: Processed Document Count.
+ :vartype processed_document_count: int
+ :ivar duration_in_seconds: Migration duration.
+ :vartype duration_in_seconds: int
+ """
+
+ _validation = {
+ "migration_status": {"readonly": True},
+ "migration_error": {"readonly": True},
+ "source_document_count": {"readonly": True},
+ "processed_document_count": {"readonly": True},
+ "duration_in_seconds": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "migration_status": {"key": "migrationStatus", "type": "str"},
+ "migration_error": {"key": "migrationError", "type": "str"},
+ "source_document_count": {"key": "sourceDocumentCount", "type": "int"},
+ "processed_document_count": {"key": "processedDocumentCount", "type": "int"},
+ "duration_in_seconds": {"key": "durationInSeconds", "type": "int"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.migration_status = None
+ self.migration_error = None
+ self.source_document_count = None
+ self.processed_document_count = None
+ self.duration_in_seconds = None
+
+
class MySqlConnectionInfo(ConnectionInfo):
"""Information for connecting to MySQL server.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: Type of connection info. Required.
:vartype type: str
@@ -14555,8 +15647,8 @@ def __init__(
encrypt_connection: bool = True,
authentication: Optional[Union[str, "_models.AuthenticationType"]] = None,
additional_settings: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword user_name: User name.
:paramtype user_name: str
@@ -14601,7 +15693,7 @@ class NameAvailabilityRequest(_serialization.Model):
"type": {"key": "type", "type": "str"},
}
- def __init__(self, *, name: Optional[str] = None, type: Optional[str] = None, **kwargs):
+ def __init__(self, *, name: Optional[str] = None, type: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword name: The proposed resource name.
:paramtype name: str
@@ -14638,8 +15730,8 @@ def __init__(
name_available: Optional[bool] = None,
reason: Optional[Union[str, "_models.NameCheckFailureReason"]] = None,
message: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name_available: If true, the name is valid and available. If false, 'reason' describes
why not.
@@ -14708,7 +15800,7 @@ class NodeMonitoringData(_serialization.Model):
"received_bytes": {"key": "receivedBytes", "type": "float"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.additional_properties = None
@@ -14733,7 +15825,7 @@ class NonSqlDataMigrationTable(_serialization.Model):
"source_name": {"key": "sourceName", "type": "str"},
}
- def __init__(self, *, source_name: Optional[str] = None, **kwargs):
+ def __init__(self, *, source_name: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword source_name: Source table name.
:paramtype source_name: str
@@ -14785,7 +15877,7 @@ class NonSqlDataMigrationTableResult(_serialization.Model):
"errors": {"key": "errors", "type": "[DataMigrationError]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.result_code = None
@@ -14800,7 +15892,7 @@ def __init__(self, **kwargs):
class NonSqlMigrationTaskInput(_serialization.Model):
"""Base class for non sql migration task input.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar target_connection_info: Information for connecting to target. Required.
:vartype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -14839,8 +15931,8 @@ def __init__(
project_name: str,
project_location: str,
selected_tables: List["_models.NonSqlDataMigrationTable"],
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword target_connection_info: Information for connecting to target. Required.
:paramtype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -14910,7 +16002,7 @@ class NonSqlMigrationTaskOutput(_serialization.Model):
"target_server_name": {"key": "targetServerName", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -14947,8 +16039,8 @@ def __init__(
code: Optional[str] = None,
message: Optional[str] = None,
details: Optional[List["_models.ODataError"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword code: The machine-readable description of the error, such as 'InvalidRequest' or
'InternalServerError'.
@@ -14980,7 +16072,9 @@ class OfflineConfiguration(_serialization.Model):
"last_backup_name": {"key": "lastBackupName", "type": "str"},
}
- def __init__(self, *, offline: Optional[bool] = None, last_backup_name: Optional[str] = None, **kwargs):
+ def __init__(
+ self, *, offline: Optional[bool] = None, last_backup_name: Optional[str] = None, **kwargs: Any
+ ) -> None:
"""
:keyword offline: Offline migration.
:paramtype offline: bool
@@ -15015,7 +16109,7 @@ class OperationListResult(_serialization.Model):
"next_link": {"key": "nextLink", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.value = None
@@ -15054,7 +16148,7 @@ class OperationsDefinition(_serialization.Model):
"properties": {"key": "properties", "type": "{object}"},
}
- def __init__(self, *, is_data_action: Optional[bool] = None, **kwargs):
+ def __init__(self, *, is_data_action: Optional[bool] = None, **kwargs: Any) -> None:
"""
:keyword is_data_action: Indicates whether the operation is a data action.
:paramtype is_data_action: bool
@@ -15096,7 +16190,7 @@ class OperationsDisplayDefinition(_serialization.Model):
"description": {"key": "description", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.provider = None
@@ -15108,7 +16202,7 @@ def __init__(self, **kwargs):
class OracleConnectionInfo(ConnectionInfo):
"""Information for connecting to Oracle server.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: Type of connection info. Required.
:vartype type: str
@@ -15156,8 +16250,8 @@ def __init__(
server_version: Optional[str] = None,
port: Optional[int] = None,
authentication: Optional[Union[str, "_models.AuthenticationType"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword user_name: User name.
:paramtype user_name: str
@@ -15223,7 +16317,7 @@ class OracleOCIDriverInfo(_serialization.Model):
"supported_oracle_versions": {"key": "supportedOracleVersions", "type": "[str]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.driver_name = None
@@ -15248,7 +16342,7 @@ class OrphanedUserInfo(_serialization.Model):
"database_name": {"key": "databaseName", "type": "str"},
}
- def __init__(self, *, name: Optional[str] = None, database_name: Optional[str] = None, **kwargs):
+ def __init__(self, *, name: Optional[str] = None, database_name: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword name: Name of the orphaned user.
:paramtype name: str
@@ -15260,10 +16354,10 @@ def __init__(self, *, name: Optional[str] = None, database_name: Optional[str] =
self.database_name = database_name
-class PostgreSqlConnectionInfo(ConnectionInfo): # pylint: disable=too-many-instance-attributes
+class PostgreSqlConnectionInfo(ConnectionInfo):
"""Information for connecting to PostgreSQL server.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: Type of connection info. Required.
:vartype type: str
@@ -15332,8 +16426,8 @@ def __init__(
additional_settings: Optional[str] = None,
server_brand_version: Optional[str] = None,
authentication: Optional[Union[str, "_models.AuthenticationType"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword user_name: User name.
:paramtype user_name: str
@@ -15376,7 +16470,7 @@ def __init__(
self.authentication = authentication
-class Project(TrackedResource): # pylint: disable=too-many-instance-attributes
+class Project(TrackedResourceAutoGenerated):
"""A project resource.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -15392,7 +16486,7 @@ class Project(TrackedResource): # pylint: disable=too-many-instance-attributes
:ivar type:
:vartype type: str
:ivar system_data:
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
:ivar etag: HTTP strong entity tag value. This is ignored if submitted.
:vartype etag: str
:ivar source_platform: Source platform for the project. Known values are: "SQL", "MySQL",
@@ -15432,7 +16526,7 @@ class Project(TrackedResource): # pylint: disable=too-many-instance-attributes
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
"etag": {"key": "etag", "type": "str"},
"source_platform": {"key": "properties.sourcePlatform", "type": "str"},
"azure_authentication_info": {"key": "properties.azureAuthenticationInfo", "type": "AzureActiveDirectoryApp"},
@@ -15456,8 +16550,8 @@ def __init__(
source_connection_info: Optional["_models.ConnectionInfo"] = None,
target_connection_info: Optional["_models.ConnectionInfo"] = None,
databases_info: Optional[List["_models.DatabaseInfo"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword location:
:paramtype location: str
@@ -15493,7 +16587,7 @@ def __init__(
self.provisioning_state = None
-class Resource(_serialization.Model):
+class ResourceAutoGenerated(_serialization.Model):
"""ARM resource.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -15518,7 +16612,7 @@ class Resource(_serialization.Model):
"type": {"key": "type", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -15526,7 +16620,7 @@ def __init__(self, **kwargs):
self.type = None
-class ProjectFile(Resource):
+class ProjectFile(ResourceAutoGenerated):
"""A file resource.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -15542,7 +16636,7 @@ class ProjectFile(Resource):
:ivar properties: Custom file properties.
:vartype properties: ~azure.mgmt.datamigration.models.ProjectFileProperties
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
"""
_validation = {
@@ -15558,12 +16652,12 @@ class ProjectFile(Resource):
"type": {"key": "type", "type": "str"},
"etag": {"key": "etag", "type": "str"},
"properties": {"key": "properties", "type": "ProjectFileProperties"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
}
def __init__(
- self, *, etag: Optional[str] = None, properties: Optional["_models.ProjectFileProperties"] = None, **kwargs
- ):
+ self, *, etag: Optional[str] = None, properties: Optional["_models.ProjectFileProperties"] = None, **kwargs: Any
+ ) -> None:
"""
:keyword etag: HTTP strong entity tag value. This is ignored if submitted.
:paramtype etag: str
@@ -15615,8 +16709,8 @@ def __init__(
extension: Optional[str] = None,
file_path: Optional[str] = None,
media_type: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword extension: Optional File extension. If submitted it should not have a leading period
and must match the extension from filePath.
@@ -15650,7 +16744,9 @@ class ProjectList(_serialization.Model):
"next_link": {"key": "nextLink", "type": "str"},
}
- def __init__(self, *, value: Optional[List["_models.Project"]] = None, next_link: Optional[str] = None, **kwargs):
+ def __init__(
+ self, *, value: Optional[List["_models.Project"]] = None, next_link: Optional[str] = None, **kwargs: Any
+ ) -> None:
"""
:keyword value: List of projects.
:paramtype value: list[~azure.mgmt.datamigration.models.Project]
@@ -15662,7 +16758,7 @@ def __init__(self, *, value: Optional[List["_models.Project"]] = None, next_link
self.next_link = next_link
-class ProjectTask(Resource):
+class ProjectTask(ResourceAutoGenerated):
"""A task resource.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -15678,7 +16774,7 @@ class ProjectTask(Resource):
:ivar properties: Custom task properties.
:vartype properties: ~azure.mgmt.datamigration.models.ProjectTaskProperties
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
"""
_validation = {
@@ -15694,12 +16790,12 @@ class ProjectTask(Resource):
"type": {"key": "type", "type": "str"},
"etag": {"key": "etag", "type": "str"},
"properties": {"key": "properties", "type": "ProjectTaskProperties"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
}
def __init__(
- self, *, etag: Optional[str] = None, properties: Optional["_models.ProjectTaskProperties"] = None, **kwargs
- ):
+ self, *, etag: Optional[str] = None, properties: Optional["_models.ProjectTaskProperties"] = None, **kwargs: Any
+ ) -> None:
"""
:keyword etag: HTTP strong entity tag value. This is ignored if submitted.
:paramtype etag: str
@@ -15731,8 +16827,8 @@ def __init__(
*,
query_results: Optional["_models.QueryExecutionResult"] = None,
validation_errors: Optional["_models.ValidationError"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword query_results: List of queries executed and it's execution results in source and
target.
@@ -15772,8 +16868,8 @@ def __init__(
statements_in_batch: Optional[int] = None,
source_result: Optional["_models.ExecutionStatistics"] = None,
target_result: Optional["_models.ExecutionStatistics"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword query_text: Query text retrieved from the source server.
:paramtype query_text: str
@@ -15824,8 +16920,8 @@ def __init__(
limit: Optional[float] = None,
name: Optional["_models.QuotaName"] = None,
unit: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword current_value: The current value of the quota. If null or missing, the current value
cannot be determined in the context of the request.
@@ -15863,7 +16959,9 @@ class QuotaList(_serialization.Model):
"next_link": {"key": "nextLink", "type": "str"},
}
- def __init__(self, *, value: Optional[List["_models.Quota"]] = None, next_link: Optional[str] = None, **kwargs):
+ def __init__(
+ self, *, value: Optional[List["_models.Quota"]] = None, next_link: Optional[str] = None, **kwargs: Any
+ ) -> None:
"""
:keyword value: List of quotas.
:paramtype value: list[~azure.mgmt.datamigration.models.Quota]
@@ -15890,7 +16988,7 @@ class QuotaName(_serialization.Model):
"value": {"key": "value", "type": "str"},
}
- def __init__(self, *, localized_value: Optional[str] = None, value: Optional[str] = None, **kwargs):
+ def __init__(self, *, localized_value: Optional[str] = None, value: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword localized_value: The localized name of the quota.
:paramtype localized_value: str
@@ -15925,8 +17023,8 @@ def __init__(
key_name: Optional[str] = None,
auth_key1: Optional[str] = None,
auth_key2: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword key_name: The name of authentication key to generate.
:paramtype key_name: str
@@ -15976,8 +17074,8 @@ def __init__(
line_number: Optional[str] = None,
h_result: Optional[int] = None,
stack_trace: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword message: Error message.
:paramtype message: str
@@ -16001,8 +17099,8 @@ def __init__(
self.stack_trace = stack_trace
-class ResourceSku(_serialization.Model): # pylint: disable=too-many-instance-attributes
- """Describes an available DMS SKU.
+class ResourceSku(_serialization.Model):
+ """Describes an available DMS (classic) SKU.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -16010,7 +17108,7 @@ class ResourceSku(_serialization.Model): # pylint: disable=too-many-instance-at
:vartype resource_type: str
:ivar name: The name of SKU.
:vartype name: str
- :ivar tier: Specifies the tier of DMS in a scale set.
+ :ivar tier: Specifies the tier of DMS (classic) in a scale set.
:vartype tier: str
:ivar size: The Size of the SKU.
:vartype size: str
@@ -16063,7 +17161,7 @@ class ResourceSku(_serialization.Model): # pylint: disable=too-many-instance-at
"restrictions": {"key": "restrictions", "type": "[ResourceSkuRestrictions]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.resource_type = None
@@ -16101,7 +17199,7 @@ class ResourceSkuCapabilities(_serialization.Model):
"value": {"key": "value", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.name = None
@@ -16138,7 +17236,7 @@ class ResourceSkuCapacity(_serialization.Model):
"scale_type": {"key": "scaleType", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.minimum = None
@@ -16172,7 +17270,7 @@ class ResourceSkuCosts(_serialization.Model):
"extended_unit": {"key": "extendedUnit", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.meter_id = None
@@ -16207,7 +17305,7 @@ class ResourceSkuRestrictions(_serialization.Model):
"reason_code": {"key": "reasonCode", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.type = None
@@ -16216,14 +17314,14 @@ def __init__(self, **kwargs):
class ResourceSkusResult(_serialization.Model):
- """The DMS List SKUs operation response.
+ """The DMS (classic) List SKUs operation response.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar value: The list of SKUs available for the subscription. Required.
:vartype value: list[~azure.mgmt.datamigration.models.ResourceSku]
- :ivar next_link: The uri to fetch the next page of DMS SKUs. Call ListNext() with this to fetch
- the next page of DMS SKUs.
+ :ivar next_link: The uri to fetch the next page of DMS (classic) SKUs. Call ListNext() with
+ this to fetch the next page of DMS (classic) SKUs.
:vartype next_link: str
"""
@@ -16236,12 +17334,12 @@ class ResourceSkusResult(_serialization.Model):
"next_link": {"key": "nextLink", "type": "str"},
}
- def __init__(self, *, value: List["_models.ResourceSku"], next_link: Optional[str] = None, **kwargs):
+ def __init__(self, *, value: List["_models.ResourceSku"], next_link: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword value: The list of SKUs available for the subscription. Required.
:paramtype value: list[~azure.mgmt.datamigration.models.ResourceSku]
- :keyword next_link: The uri to fetch the next page of DMS SKUs. Call ListNext() with this to
- fetch the next page of DMS SKUs.
+ :keyword next_link: The uri to fetch the next page of DMS (classic) SKUs. Call ListNext() with
+ this to fetch the next page of DMS (classic) SKUs.
:paramtype next_link: str
"""
super().__init__(**kwargs)
@@ -16278,8 +17376,8 @@ def __init__(
validation_errors: Optional["_models.ValidationError"] = None,
source_database_object_count: Optional[Dict[str, int]] = None,
target_database_object_count: Optional[Dict[str, int]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword schema_differences: List of schema differences between the source and target
databases.
@@ -16326,8 +17424,8 @@ def __init__(
object_name: Optional[str] = None,
object_type: Optional[Union[str, "_models.ObjectType"]] = None,
update_action: Optional[Union[str, "_models.UpdateActionType"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword object_name: Name of the object that has the difference.
:paramtype object_name: str
@@ -16369,8 +17467,8 @@ def __init__(
schema_option: Optional[Union[str, "_models.SchemaMigrationOption"]] = None,
file_id: Optional[str] = None,
file_name: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword schema_option: Option on how to migrate the schema. Known values are: "None",
"ExtractFromSource", and "UseStorageFile".
@@ -16389,7 +17487,7 @@ def __init__(
class SelectedCertificateInput(_serialization.Model):
"""Info for certificate to be exported for TDE enabled databases.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar certificate_name: Name of certificate to be exported. Required.
:vartype certificate_name: str
@@ -16407,7 +17505,7 @@ class SelectedCertificateInput(_serialization.Model):
"password": {"key": "password", "type": "str"},
}
- def __init__(self, *, certificate_name: str, password: str, **kwargs):
+ def __init__(self, *, certificate_name: str, password: str, **kwargs: Any) -> None:
"""
:keyword certificate_name: Name of certificate to be exported. Required.
:paramtype certificate_name: str
@@ -16456,7 +17554,7 @@ class ServerProperties(_serialization.Model):
"server_database_count": {"key": "serverDatabaseCount", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.server_platform = None
@@ -16468,7 +17566,7 @@ def __init__(self, **kwargs):
class ServiceOperation(_serialization.Model):
- """Description of an action supported by the Database Migration Service.
+ """Description of an action supported by the Azure Database Migration Service (classic).
:ivar name: The fully qualified action name, e.g. Microsoft.DataMigration/services/read.
:vartype name: str
@@ -16482,8 +17580,8 @@ class ServiceOperation(_serialization.Model):
}
def __init__(
- self, *, name: Optional[str] = None, display: Optional["_models.ServiceOperationDisplay"] = None, **kwargs
- ):
+ self, *, name: Optional[str] = None, display: Optional["_models.ServiceOperationDisplay"] = None, **kwargs: Any
+ ) -> None:
"""
:keyword name: The fully qualified action name, e.g. Microsoft.DataMigration/services/read.
:paramtype name: str
@@ -16522,8 +17620,8 @@ def __init__(
resource: Optional[str] = None,
operation: Optional[str] = None,
description: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword provider: The localized resource provider name.
:paramtype provider: str
@@ -16556,8 +17654,12 @@ class ServiceOperationList(_serialization.Model):
}
def __init__(
- self, *, value: Optional[List["_models.ServiceOperation"]] = None, next_link: Optional[str] = None, **kwargs
- ):
+ self,
+ *,
+ value: Optional[List["_models.ServiceOperation"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword value: List of actions.
:paramtype value: list[~azure.mgmt.datamigration.models.ServiceOperation]
@@ -16602,8 +17704,8 @@ def __init__(
family: Optional[str] = None,
size: Optional[str] = None,
capacity: Optional[int] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: The unique name of the SKU, such as 'P3'.
:paramtype name: str
@@ -16641,8 +17743,12 @@ class ServiceSkuList(_serialization.Model):
}
def __init__(
- self, *, value: Optional[List["_models.AvailableServiceSku"]] = None, next_link: Optional[str] = None, **kwargs
- ):
+ self,
+ *,
+ value: Optional[List["_models.AvailableServiceSku"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword value: List of service SKUs.
:paramtype value: list[~azure.mgmt.datamigration.models.AvailableServiceSku]
@@ -16682,8 +17788,8 @@ def __init__(
*,
file_share: Optional["_models.SqlFileShare"] = None,
azure_blob: Optional["_models.AzureBlob"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword file_share: Source File share.
:paramtype file_share: ~azure.mgmt.datamigration.models.SqlFileShare
@@ -16742,7 +17848,7 @@ class SqlBackupFileInfo(_serialization.Model):
"family_sequence_number": {"key": "familySequenceNumber", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.file_name = None
@@ -16755,7 +17861,7 @@ def __init__(self, **kwargs):
self.family_sequence_number = None
-class SqlBackupSetInfo(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class SqlBackupSetInfo(_serialization.Model):
"""Information of backup set.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -16812,7 +17918,7 @@ class SqlBackupSetInfo(_serialization.Model): # pylint: disable=too-many-instan
"ignore_reasons": {"key": "ignoreReasons", "type": "[str]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.backup_set_id = None
@@ -16828,10 +17934,10 @@ def __init__(self, **kwargs):
self.ignore_reasons = None
-class SqlConnectionInfo(ConnectionInfo): # pylint: disable=too-many-instance-attributes
+class SqlConnectionInfo(ConnectionInfo):
"""Information for connecting to SQL database server.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: Type of connection info. Required.
:vartype type: str
@@ -16840,7 +17946,7 @@ class SqlConnectionInfo(ConnectionInfo): # pylint: disable=too-many-instance-at
:ivar password: Password credential.
:vartype password: str
:ivar data_source: Data source in the format
- Protocol:MachineName\SQLServerInstanceName,PortNumber. Required.
+ Protocol:MachineName\\SQLServerInstanceName,PortNumber. Required.
:vartype data_source: str
:ivar server_name: name of the server.
:vartype server_name: str
@@ -16905,15 +18011,15 @@ def __init__(
additional_settings: Optional[str] = None,
trust_server_certificate: bool = False,
platform: Optional[Union[str, "_models.SqlSourcePlatform"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword user_name: User name.
:paramtype user_name: str
:keyword password: Password credential.
:paramtype password: str
:keyword data_source: Data source in the format
- Protocol:MachineName\SQLServerInstanceName,PortNumber. Required.
+ Protocol:MachineName\\SQLServerInstanceName,PortNumber. Required.
:paramtype data_source: str
:keyword server_name: name of the server.
:paramtype server_name: str
@@ -16989,8 +18095,8 @@ def __init__(
password: Optional[str] = None,
encrypt_connection: Optional[bool] = None,
trust_server_certificate: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword data_source: Data source.
:paramtype data_source: str
@@ -17040,7 +18146,7 @@ class SqlDbMigrationStatusDetails(_serialization.Model):
"list_of_copy_progress_details": {"key": "listOfCopyProgressDetails", "type": "[CopyProgressDetails]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.migration_state = None
@@ -17065,7 +18171,7 @@ class SqlDbOfflineConfiguration(_serialization.Model):
"offline": {"key": "offline", "type": "bool"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.offline = None
@@ -17089,8 +18195,13 @@ class SqlFileShare(_serialization.Model):
}
def __init__(
- self, *, path: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, **kwargs
- ):
+ self,
+ *,
+ path: Optional[str] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword path: Location as SMB share or local drive where backups are placed.
:paramtype path: str
@@ -17126,14 +18237,14 @@ class SqlMigrationListResult(_serialization.Model):
"next_link": {"key": "nextLink", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.value = None
self.next_link = None
-class SqlMigrationService(TrackedResource):
+class SqlMigrationService(TrackedResourceAutoGenerated):
"""A SQL Migration Service.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -17149,7 +18260,7 @@ class SqlMigrationService(TrackedResource):
:ivar type:
:vartype type: str
:ivar system_data:
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData
+ :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated
:ivar provisioning_state: Provisioning state to track the async operation status.
:vartype provisioning_state: str
:ivar integration_runtime_state: Current state of the Integration runtime.
@@ -17171,12 +18282,12 @@ class SqlMigrationService(TrackedResource):
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
- "system_data": {"key": "systemData", "type": "SystemData"},
+ "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
"integration_runtime_state": {"key": "properties.integrationRuntimeState", "type": "str"},
}
- def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs):
+ def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
:keyword location:
:paramtype location: str
@@ -17199,7 +18310,7 @@ class SqlMigrationServiceUpdate(_serialization.Model):
"tags": {"key": "tags", "type": "{str}"},
}
- def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs):
+ def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
:keyword tags: Dictionary of :code:``.
:paramtype tags: dict[str, str]
@@ -17211,8 +18322,8 @@ def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs):
class SsisMigrationInfo(_serialization.Model):
"""SSIS migration info with SSIS store type, overwrite policy.
- :ivar ssis_store_type: The SSIS store type of source, only SSIS catalog is supported now in
- DMS. "SsisCatalog"
+ :ivar ssis_store_type: The SSIS store type of source, only SSIS catalog is supported now in DMS
+ (classic). "SsisCatalog"
:vartype ssis_store_type: str or ~azure.mgmt.datamigration.models.SsisStoreType
:ivar project_overwrite_option: The overwrite option for the SSIS project migration. Known
values are: "Ignore" and "Overwrite".
@@ -17236,11 +18347,11 @@ def __init__(
ssis_store_type: Optional[Union[str, "_models.SsisStoreType"]] = None,
project_overwrite_option: Optional[Union[str, "_models.SsisMigrationOverwriteOption"]] = None,
environment_overwrite_option: Optional[Union[str, "_models.SsisMigrationOverwriteOption"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword ssis_store_type: The SSIS store type of source, only SSIS catalog is supported now in
- DMS. "SsisCatalog"
+ DMS (classic). "SsisCatalog"
:paramtype ssis_store_type: str or ~azure.mgmt.datamigration.models.SsisStoreType
:keyword project_overwrite_option: The overwrite option for the SSIS project migration. Known
values are: "Ignore" and "Overwrite".
@@ -17283,7 +18394,7 @@ class StartMigrationScenarioServerRoleResult(_serialization.Model):
"exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.name = None
@@ -17316,7 +18427,7 @@ class SyncMigrationDatabaseErrorEvent(_serialization.Model):
"event_text": {"key": "eventText", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.timestamp_string = None
@@ -17325,18 +18436,83 @@ def __init__(self, **kwargs):
class SystemData(_serialization.Model):
- """SystemData.
+ """Metadata pertaining to creation and last modification of the resource.
+
+ :ivar created_by: The identity that created the resource.
+ :vartype created_by: str
+ :ivar created_by_type: The type of identity that created the resource. Known values are:
+ "User", "Application", "ManagedIdentity", and "Key".
+ :vartype created_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType
+ :ivar created_at: The timestamp of resource creation (UTC).
+ :vartype created_at: ~datetime.datetime
+ :ivar last_modified_by: The identity that last modified the resource.
+ :vartype last_modified_by: str
+ :ivar last_modified_by_type: The type of identity that last modified the resource. Known values
+ are: "User", "Application", "ManagedIdentity", and "Key".
+ :vartype last_modified_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType
+ :ivar last_modified_at: The timestamp of resource last modification (UTC).
+ :vartype last_modified_at: ~datetime.datetime
+ """
+
+ _attribute_map = {
+ "created_by": {"key": "createdBy", "type": "str"},
+ "created_by_type": {"key": "createdByType", "type": "str"},
+ "created_at": {"key": "createdAt", "type": "iso-8601"},
+ "last_modified_by": {"key": "lastModifiedBy", "type": "str"},
+ "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"},
+ "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"},
+ }
+
+ def __init__(
+ self,
+ *,
+ created_by: Optional[str] = None,
+ created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None,
+ created_at: Optional[datetime.datetime] = None,
+ last_modified_by: Optional[str] = None,
+ last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None,
+ last_modified_at: Optional[datetime.datetime] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword created_by: The identity that created the resource.
+ :paramtype created_by: str
+ :keyword created_by_type: The type of identity that created the resource. Known values are:
+ "User", "Application", "ManagedIdentity", and "Key".
+ :paramtype created_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType
+ :keyword created_at: The timestamp of resource creation (UTC).
+ :paramtype created_at: ~datetime.datetime
+ :keyword last_modified_by: The identity that last modified the resource.
+ :paramtype last_modified_by: str
+ :keyword last_modified_by_type: The type of identity that last modified the resource. Known
+ values are: "User", "Application", "ManagedIdentity", and "Key".
+ :paramtype last_modified_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType
+ :keyword last_modified_at: The timestamp of resource last modification (UTC).
+ :paramtype last_modified_at: ~datetime.datetime
+ """
+ super().__init__(**kwargs)
+ self.created_by = created_by
+ self.created_by_type = created_by_type
+ self.created_at = created_at
+ self.last_modified_by = last_modified_by
+ self.last_modified_by_type = last_modified_by_type
+ self.last_modified_at = last_modified_at
+
+
+class SystemDataAutoGenerated(_serialization.Model):
+ """SystemDataAutoGenerated.
:ivar created_by:
:vartype created_by: str
- :ivar created_by_type: Known values are: "User", "Application", "ManagedIdentity", and "Key".
+ :ivar created_by_type: The type of identity that created the resource. Known values are:
+ "User", "Application", "ManagedIdentity", and "Key".
:vartype created_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType
:ivar created_at:
:vartype created_at: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: str
- :ivar last_modified_by_type: Known values are: "User", "Application", "ManagedIdentity", and
- "Key".
+ :ivar last_modified_by_type: The type of identity that created the resource. Known values are:
+ "User", "Application", "ManagedIdentity", and "Key".
:vartype last_modified_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType
:ivar last_modified_at:
:vartype last_modified_at: ~datetime.datetime
@@ -17360,20 +18536,20 @@ def __init__(
last_modified_by: Optional[str] = None,
last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None,
last_modified_at: Optional[datetime.datetime] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword created_by:
:paramtype created_by: str
- :keyword created_by_type: Known values are: "User", "Application", "ManagedIdentity", and
- "Key".
+ :keyword created_by_type: The type of identity that created the resource. Known values are:
+ "User", "Application", "ManagedIdentity", and "Key".
:paramtype created_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType
:keyword created_at:
:paramtype created_at: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: str
- :keyword last_modified_by_type: Known values are: "User", "Application", "ManagedIdentity", and
- "Key".
+ :keyword last_modified_by_type: The type of identity that created the resource. Known values
+ are: "User", "Application", "ManagedIdentity", and "Key".
:paramtype last_modified_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType
:keyword last_modified_at:
:paramtype last_modified_at: ~datetime.datetime
@@ -17402,8 +18578,8 @@ class TargetLocation(_serialization.Model):
}
def __init__(
- self, *, storage_account_resource_id: Optional[str] = None, account_key: Optional[str] = None, **kwargs
- ):
+ self, *, storage_account_resource_id: Optional[str] = None, account_key: Optional[str] = None, **kwargs: Any
+ ) -> None:
"""
:keyword storage_account_resource_id: Resource Id of the storage account copying backups.
:paramtype storage_account_resource_id: str
@@ -17430,8 +18606,8 @@ class TaskList(_serialization.Model):
}
def __init__(
- self, *, value: Optional[List["_models.ProjectTask"]] = None, next_link: Optional[str] = None, **kwargs
- ):
+ self, *, value: Optional[List["_models.ProjectTask"]] = None, next_link: Optional[str] = None, **kwargs: Any
+ ) -> None:
"""
:keyword value: List of tasks.
:paramtype value: list[~azure.mgmt.datamigration.models.ProjectTask]
@@ -17454,7 +18630,7 @@ class UploadOCIDriverTaskInput(_serialization.Model):
"driver_share": {"key": "driverShare", "type": "FileShare"},
}
- def __init__(self, *, driver_share: Optional["_models.FileShare"] = None, **kwargs):
+ def __init__(self, *, driver_share: Optional["_models.FileShare"] = None, **kwargs: Any) -> None:
"""
:keyword driver_share: File share information for the OCI driver archive.
:paramtype driver_share: ~azure.mgmt.datamigration.models.FileShare
@@ -17484,7 +18660,7 @@ class UploadOCIDriverTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.driver_package_name = None
@@ -17496,7 +18672,7 @@ class UploadOCIDriverTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -17555,8 +18731,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.UploadOCIDriverTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -17569,12 +18745,40 @@ def __init__(
self.output = None
-class ValidateMigrationInputSqlServerSqlDbSyncTaskProperties(ProjectTaskProperties):
+class UserAssignedIdentity(_serialization.Model):
+ """User assigned identity properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the assigned identity.
+ :vartype principal_id: str
+ :ivar client_id: The client ID of the assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ "principal_id": {"readonly": True},
+ "client_id": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "principal_id": {"key": "principalId", "type": "str"},
+ "client_id": {"key": "clientId", "type": "str"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.principal_id = None
+ self.client_id = None
+
+
+class ValidateMigrationInputSqlServerSqlDbSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
"""Properties for task that validates migration input for SQL to Azure SQL DB sync migrations.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -17634,8 +18838,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ValidateSyncMigrationInputSqlServerTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -17648,10 +18852,11 @@ def __init__(
self.output = None
-class ValidateMigrationInputSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskInput):
- """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario.
+class ValidateMigrationInputSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskInput): # pylint: disable=name-too-long
+ """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online
+ scenario.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar selected_databases: Databases to migrate. Required.
:vartype selected_databases:
@@ -17665,70 +18870,16 @@ class ValidateMigrationInputSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskIn
:ivar target_connection_info: Connection information for Azure SQL Database Managed Instance.
Required.
:vartype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo
- :ivar azure_app: Azure Active Directory Application the DMS instance will use to connect to the
- target instance of Azure SQL Database Managed Instance and the Azure Storage Account. Required.
+ :ivar azure_app: Azure Active Directory Application the DMS (classic) instance will use to
+ connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage
+ Account. Required.
:vartype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp
"""
- _validation = {
- "selected_databases": {"required": True},
- "storage_resource_id": {"required": True},
- "source_connection_info": {"required": True},
- "target_connection_info": {"required": True},
- "azure_app": {"required": True},
- }
-
- _attribute_map = {
- "selected_databases": {"key": "selectedDatabases", "type": "[MigrateSqlServerSqlMIDatabaseInput]"},
- "backup_file_share": {"key": "backupFileShare", "type": "FileShare"},
- "storage_resource_id": {"key": "storageResourceId", "type": "str"},
- "source_connection_info": {"key": "sourceConnectionInfo", "type": "SqlConnectionInfo"},
- "target_connection_info": {"key": "targetConnectionInfo", "type": "MiSqlConnectionInfo"},
- "azure_app": {"key": "azureApp", "type": "AzureActiveDirectoryApp"},
- }
-
- def __init__(
- self,
- *,
- selected_databases: List["_models.MigrateSqlServerSqlMIDatabaseInput"],
- storage_resource_id: str,
- source_connection_info: "_models.SqlConnectionInfo",
- target_connection_info: "_models.MiSqlConnectionInfo",
- azure_app: "_models.AzureActiveDirectoryApp",
- backup_file_share: Optional["_models.FileShare"] = None,
- **kwargs
- ):
- """
- :keyword selected_databases: Databases to migrate. Required.
- :paramtype selected_databases:
- list[~azure.mgmt.datamigration.models.MigrateSqlServerSqlMIDatabaseInput]
- :keyword backup_file_share: Backup file share information for all selected databases.
- :paramtype backup_file_share: ~azure.mgmt.datamigration.models.FileShare
- :keyword storage_resource_id: Fully qualified resourceId of storage. Required.
- :paramtype storage_resource_id: str
- :keyword source_connection_info: Connection information for source SQL Server. Required.
- :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
- :keyword target_connection_info: Connection information for Azure SQL Database Managed
- Instance. Required.
- :paramtype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo
- :keyword azure_app: Azure Active Directory Application the DMS instance will use to connect to
- the target instance of Azure SQL Database Managed Instance and the Azure Storage Account.
- Required.
- :paramtype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp
- """
- super().__init__(
- selected_databases=selected_databases,
- backup_file_share=backup_file_share,
- storage_resource_id=storage_resource_id,
- source_connection_info=source_connection_info,
- target_connection_info=target_connection_info,
- azure_app=azure_app,
- **kwargs
- )
-
-class ValidateMigrationInputSqlServerSqlMISyncTaskOutput(_serialization.Model):
- """Output for task that validates migration input for Azure SQL Database Managed Instance online migration.
+class ValidateMigrationInputSqlServerSqlMISyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for task that validates migration input for Azure SQL Database Managed Instance online
+ migration.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -17752,7 +18903,7 @@ class ValidateMigrationInputSqlServerSqlMISyncTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -17760,12 +18911,13 @@ def __init__(self, **kwargs):
self.validation_errors = None
-class ValidateMigrationInputSqlServerSqlMISyncTaskProperties(ProjectTaskProperties):
- """Properties for task that validates migration input for SQL to Azure SQL Database Managed Instance sync scenario.
+class ValidateMigrationInputSqlServerSqlMISyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for task that validates migration input for SQL to Azure SQL Database Managed
+ Instance sync scenario.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -17826,8 +18978,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ValidateMigrationInputSqlServerSqlMISyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -17841,10 +18993,10 @@ def __init__(
self.output = None
-class ValidateMigrationInputSqlServerSqlMITaskInput(_serialization.Model):
+class ValidateMigrationInputSqlServerSqlMITaskInput(_serialization.Model): # pylint: disable=name-too-long
"""Input for task that validates migration input for SQL to Azure SQL Managed Instance.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to source. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -17892,8 +19044,8 @@ def __init__(
selected_logins: Optional[List[str]] = None,
backup_file_share: Optional["_models.FileShare"] = None,
backup_mode: Optional[Union[str, "_models.BackupMode"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to source. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -17923,8 +19075,9 @@ def __init__(
self.backup_mode = backup_mode
-class ValidateMigrationInputSqlServerSqlMITaskOutput(_serialization.Model):
- """Output for task that validates migration input for SQL to Azure SQL Managed Instance migrations.
+class ValidateMigrationInputSqlServerSqlMITaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for task that validates migration input for SQL to Azure SQL Managed Instance
+ migrations.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -17971,7 +19124,7 @@ class ValidateMigrationInputSqlServerSqlMITaskOutput(_serialization.Model):
"database_backup_info": {"key": "databaseBackupInfo", "type": "DatabaseBackupInfo"},
}
- def __init__(self, *, database_backup_info: Optional["_models.DatabaseBackupInfo"] = None, **kwargs):
+ def __init__(self, *, database_backup_info: Optional["_models.DatabaseBackupInfo"] = None, **kwargs: Any) -> None:
"""
:keyword database_backup_info: Information about backup files when existing backup mode is
used.
@@ -17988,12 +19141,13 @@ def __init__(self, *, database_backup_info: Optional["_models.DatabaseBackupInfo
self.database_backup_info = database_backup_info
-class ValidateMigrationInputSqlServerSqlMITaskProperties(ProjectTaskProperties):
- """Properties for task that validates migration input for SQL to Azure SQL Database Managed Instance.
+class ValidateMigrationInputSqlServerSqlMITaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for task that validates migration input for SQL to Azure SQL Database Managed
+ Instance.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -18053,8 +19207,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.ValidateMigrationInputSqlServerSqlMITaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -18073,7 +19227,7 @@ class ValidateMongoDbTaskProperties(ProjectTaskProperties):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -18132,8 +19286,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MongoDbMigrationSettings"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -18146,12 +19300,13 @@ def __init__(
self.output = None
-class ValidateOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties):
- """Properties for the task that validates a migration for Oracle to Azure Database for PostgreSQL for online migrations.
+class ValidateOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long
+ """Properties for the task that validates a migration for Oracle to Azure Database for PostgreSQL
+ for online migrations.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb",
"ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync",
@@ -18212,8 +19367,8 @@ def __init__(
*,
client_data: Optional[Dict[str, str]] = None,
input: Optional["_models.MigrateOracleAzureDbPostgreSqlSyncTaskInput"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword client_data: Key value pairs of client data to attach meta data information to task.
:paramtype client_data: dict[str, str]
@@ -18227,8 +19382,9 @@ def __init__(
self.output = None
-class ValidateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model):
- """Output for task that validates migration input for Oracle to Azure Database for PostgreSQL for online migrations.
+class ValidateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long
+ """Output for task that validates migration input for Oracle to Azure Database for PostgreSQL for
+ online migrations.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -18244,16 +19400,16 @@ class ValidateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.validation_errors = None
-class ValidateSyncMigrationInputSqlServerTaskInput(_serialization.Model):
+class ValidateSyncMigrationInputSqlServerTaskInput(_serialization.Model): # pylint: disable=name-too-long
"""Input for task that validates migration input for SQL sync migrations.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar source_connection_info: Information for connecting to source SQL server. Required.
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -18282,8 +19438,8 @@ def __init__(
source_connection_info: "_models.SqlConnectionInfo",
target_connection_info: "_models.SqlConnectionInfo",
selected_databases: List["_models.MigrateSqlServerSqlDbSyncDatabaseInput"],
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword source_connection_info: Information for connecting to source SQL server. Required.
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo
@@ -18299,7 +19455,7 @@ def __init__(
self.selected_databases = selected_databases
-class ValidateSyncMigrationInputSqlServerTaskOutput(_serialization.Model):
+class ValidateSyncMigrationInputSqlServerTaskOutput(_serialization.Model): # pylint: disable=name-too-long
"""Output for task that validates migration input for SQL sync migrations.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -18324,7 +19480,7 @@ class ValidateSyncMigrationInputSqlServerTaskOutput(_serialization.Model):
"validation_errors": {"key": "validationErrors", "type": "[ReportableException]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -18347,8 +19503,8 @@ class ValidationError(_serialization.Model):
}
def __init__(
- self, *, text: Optional[str] = None, severity: Optional[Union[str, "_models.Severity"]] = None, **kwargs
- ):
+ self, *, text: Optional[str] = None, severity: Optional[Union[str, "_models.Severity"]] = None, **kwargs: Any
+ ) -> None:
"""
:keyword text: Error Text.
:paramtype text: str
@@ -18378,8 +19534,13 @@ class WaitStatistics(_serialization.Model):
}
def __init__(
- self, *, wait_type: Optional[str] = None, wait_time_ms: float = 0, wait_count: Optional[int] = None, **kwargs
- ):
+ self,
+ *,
+ wait_type: Optional[str] = None,
+ wait_time_ms: float = 0,
+ wait_count: Optional[int] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword wait_type: Type of the Wait.
:paramtype wait_type: str
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/__init__.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/__init__.py
index 4825871afb87..08a84f29ada2 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/__init__.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/__init__.py
@@ -5,29 +5,41 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._database_migrations_sql_db_operations import DatabaseMigrationsSqlDbOperations
-from ._database_migrations_sql_mi_operations import DatabaseMigrationsSqlMiOperations
-from ._database_migrations_sql_vm_operations import DatabaseMigrationsSqlVmOperations
-from ._operations import Operations
-from ._sql_migration_services_operations import SqlMigrationServicesOperations
-from ._resource_skus_operations import ResourceSkusOperations
-from ._services_operations import ServicesOperations
-from ._tasks_operations import TasksOperations
-from ._service_tasks_operations import ServiceTasksOperations
-from ._projects_operations import ProjectsOperations
-from ._usages_operations import UsagesOperations
-from ._files_operations import FilesOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._database_migrations_mongo_to_cosmos_db_ru_mongo_operations import DatabaseMigrationsMongoToCosmosDbRUMongoOperations # type: ignore
+from ._database_migrations_mongo_to_cosmos_dbv_core_mongo_operations import DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations # type: ignore
+from ._database_migrations_sql_db_operations import DatabaseMigrationsSqlDbOperations # type: ignore
+from ._database_migrations_sql_mi_operations import DatabaseMigrationsSqlMiOperations # type: ignore
+from ._database_migrations_sql_vm_operations import DatabaseMigrationsSqlVmOperations # type: ignore
+from ._operations import Operations # type: ignore
+from ._migration_services_operations import MigrationServicesOperations # type: ignore
+from ._sql_migration_services_operations import SqlMigrationServicesOperations # type: ignore
+from ._resource_skus_operations import ResourceSkusOperations # type: ignore
+from ._services_operations import ServicesOperations # type: ignore
+from ._tasks_operations import TasksOperations # type: ignore
+from ._service_tasks_operations import ServiceTasksOperations # type: ignore
+from ._projects_operations import ProjectsOperations # type: ignore
+from ._usages_operations import UsagesOperations # type: ignore
+from ._files_operations import FilesOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
+ "DatabaseMigrationsMongoToCosmosDbRUMongoOperations",
+ "DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations",
"DatabaseMigrationsSqlDbOperations",
"DatabaseMigrationsSqlMiOperations",
"DatabaseMigrationsSqlVmOperations",
"Operations",
+ "MigrationServicesOperations",
"SqlMigrationServicesOperations",
"ResourceSkusOperations",
"ServicesOperations",
@@ -37,5 +49,5 @@
"UsagesOperations",
"FilesOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py
new file mode 100644
index 000000000000..87072e86a7b7
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py
@@ -0,0 +1,693 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
+import urllib.parse
+
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models as _models
+from .._serialization import Serializer
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_get_request(
+ resource_group_name: str, target_resource_name: str, migration_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "targetResourceName": _SERIALIZER.url(
+ "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_create_request(
+ resource_group_name: str, target_resource_name: str, migration_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "targetResourceName": _SERIALIZER.url(
+ "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_delete_request(
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ subscription_id: str,
+ *,
+ force: Optional[bool] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "targetResourceName": _SERIALIZER.url(
+ "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ if force is not None:
+ _params["force"] = _SERIALIZER.query("force", force, "bool")
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_get_for_scope_request(
+ resource_group_name: str, target_resource_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "targetResourceName": _SERIALIZER.url(
+ "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class DatabaseMigrationsMongoToCosmosDbRUMongoOperations: # pylint: disable=name-too-long
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.mgmt.datamigration.DataMigrationManagementClient`'s
+ :attr:`database_migrations_mongo_to_cosmos_db_ru_mongo` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs):
+ input_args = list(args)
+ self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace
+ def get(
+ self, resource_group_name: str, target_resource_name: str, migration_name: str, **kwargs: Any
+ ) -> _models.DatabaseMigrationCosmosDbMongo:
+ """Get Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :return: DatabaseMigrationCosmosDbMongo or the result of cls(response)
+ :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None)
+
+ _request = build_get_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ def _create_initial(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "DatabaseMigrationCosmosDbMongo")
+
+ _request = build_create_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: _models.DatabaseMigrationCosmosDbMongo,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the
+ result of cls(response)
+ :rtype:
+ ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the
+ result of cls(response)
+ :rtype:
+ ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_create(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]],
+ **kwargs: Any
+ ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+ """Create or Update Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param parameters: Details of CosmosDB for Mongo API Migration resource. Is either a
+ DatabaseMigrationCosmosDbMongo type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo or IO[bytes]
+ :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the
+ result of cls(response)
+ :rtype:
+ ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._create_initial(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[_models.DatabaseMigrationCosmosDbMongo].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[_models.DatabaseMigrationCosmosDbMongo](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ def _delete_initial(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ force: Optional[bool] = None,
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ subscription_id=self._config.subscription_id,
+ force=force,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace
+ def begin_delete(
+ self,
+ resource_group_name: str,
+ target_resource_name: str,
+ migration_name: str,
+ force: Optional[bool] = None,
+ **kwargs: Any
+ ) -> LROPoller[None]:
+ """Delete Database Migration resource.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :param migration_name: Name of the migration. Required.
+ :type migration_name: str
+ :param force: Optional force delete boolean. If this is provided as true, migration will be
+ deleted even if active. Default value is None.
+ :type force: bool
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ migration_name=migration_name,
+ force=force,
+ api_version=api_version,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ if polling is True:
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
+ @distributed_trace
+ def get_for_scope(
+ self, resource_group_name: str, target_resource_name: str, **kwargs: Any
+ ) -> Iterable["_models.DatabaseMigrationCosmosDbMongo"]:
+ """Get Database Migration resources for the scope.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param target_resource_name: The name of the target resource/account. Required.
+ :type target_resource_name: str
+ :return: An iterator like instance of either DatabaseMigrationCosmosDbMongo or the result of
+ cls(response)
+ :rtype:
+ ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.DatabaseMigrationCosmosDbMongoListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_get_for_scope_request(
+ resource_group_name=resource_group_name,
+ target_resource_name=target_resource_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize("DatabaseMigrationCosmosDbMongoListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py
new file mode 100644
index 000000000000..eb4e537368b6
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py
@@ -0,0 +1,693 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
+import urllib.parse
+
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models as _models
+from .._serialization import Serializer
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_get_request(
+    resource_group_name: str, target_resource_name: str, migration_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+    """Build the HTTP GET request for a single database-migration resource
+    scoped under a Microsoft.DocumentDB mongoClusters target account.
+    """
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    # Default API version for this operation group; callers may override via
+    # the ``api_version`` kwarg or an explicit ``api-version`` query param.
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = kwargs.pop(
+        "template_url",
+        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/mongoClusters/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}",
+    )  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+        "targetResourceName": _SERIALIZER.url(
+            "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+        ),
+        "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"),
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+    # Construct headers
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_create_request(
+    resource_group_name: str, target_resource_name: str, migration_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+    """Build the HTTP PUT request that creates or updates a database-migration
+    resource under a Microsoft.DocumentDB mongoClusters target account.
+    """
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    # Default API version for this operation group; overridable by the caller.
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = kwargs.pop(
+        "template_url",
+        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/mongoClusters/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}",
+    )  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+        "targetResourceName": _SERIALIZER.url(
+            "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+        ),
+        "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"),
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+    # Construct headers
+    # Content-Type is only emitted when a body content type was supplied.
+    if content_type is not None:
+        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_delete_request(
+    resource_group_name: str,
+    target_resource_name: str,
+    migration_name: str,
+    subscription_id: str,
+    *,
+    force: Optional[bool] = None,
+    **kwargs: Any
+) -> HttpRequest:
+    """Build the HTTP DELETE request for a database-migration resource.
+
+    ``force`` (when not ``None``) is forwarded as the ``force`` query
+    parameter; per the operation docstring it allows deleting a migration
+    even while it is active.
+    """
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    # Default API version for this operation group; overridable by the caller.
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = kwargs.pop(
+        "template_url",
+        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/mongoClusters/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}",
+    )  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+        "targetResourceName": _SERIALIZER.url(
+            "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+        ),
+        "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"),
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    if force is not None:
+        _params["force"] = _SERIALIZER.query("force", force, "bool")
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+    # Construct headers
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_get_for_scope_request(
+    resource_group_name: str, target_resource_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+    """Build the HTTP GET request that lists all database-migration resources
+    for a target mongoClusters account (note: no ``migrationName`` segment).
+    """
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    # Default API version for this operation group; overridable by the caller.
+    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = kwargs.pop(
+        "template_url",
+        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/mongoClusters/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations",
+    )  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+        "targetResourceName": _SERIALIZER.url(
+            "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+        ),
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+    }
+
+    _url: str = _url.format(**path_format_arguments)  # type: ignore
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+    # Construct headers
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations:  # pylint: disable=name-too-long
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.mgmt.datamigration.DataMigrationManagementClient`'s
+        :attr:`database_migrations_mongo_to_cosmos_dbv_core_mongo` attribute.
+    """
+
+    models = _models
+
+    def __init__(self, *args, **kwargs):
+        # Wiring is injected by the generated service client: the pipeline
+        # client, configuration, and (de)serializers arrive positionally or as
+        # the keyword arguments "client"/"config"/"serializer"/"deserializer".
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+    @distributed_trace
+    def get(
+        self, resource_group_name: str, target_resource_name: str, migration_name: str, **kwargs: Any
+    ) -> _models.DatabaseMigrationCosmosDbMongo:
+        """Get Database Migration resource.
+
+        :param resource_group_name: Name of the resource group that contains the resource. You can
+         obtain this value from the Azure Resource Manager API or the portal. Required.
+        :type resource_group_name: str
+        :param target_resource_name: The name of the target resource/account. Required.
+        :type target_resource_name: str
+        :param migration_name: Name of the migration. Required.
+        :type migration_name: str
+        :return: DatabaseMigrationCosmosDbMongo or the result of cls(response)
+        :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        # Map well-known HTTP failures onto azure-core exception types;
+        # callers may extend or override via the ``error_map`` keyword.
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None)
+
+        _request = build_get_request(
+            resource_group_name=resource_group_name,
+            target_resource_name=target_resource_name,
+            migration_name=migration_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _stream = False
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # type: ignore
+
+        return deserialized  # type: ignore
+
+    def _create_initial(
+        self,
+        resource_group_name: str,
+        target_resource_name: str,
+        migration_name: str,
+        parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]],
+        **kwargs: Any
+    ) -> Iterator[bytes]:
+        # Initial PUT of the long-running create operation.  Returns the raw
+        # streamed response body, which ``begin_create`` hands to the poller.
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+        content_type = content_type or "application/json"
+        _json = None
+        _content = None
+        # A stream/bytes payload is sent verbatim; a model object is
+        # serialized to its JSON wire format.
+        if isinstance(parameters, (IOBase, bytes)):
+            _content = parameters
+        else:
+            _json = self._serialize.body(parameters, "DatabaseMigrationCosmosDbMongo")
+
+        _request = build_create_request(
+            resource_group_name=resource_group_name,
+            target_resource_name=target_resource_name,
+            migration_name=migration_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _decompress = kwargs.pop("decompress", True)
+        # Stream the response so the body can be consumed lazily by the poller.
+        _stream = True
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            try:
+                response.read()  # Load the body in memory and close the socket
+            except (StreamConsumedError, StreamClosedError):
+                pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @overload
+    def begin_create(
+        self,
+        resource_group_name: str,
+        target_resource_name: str,
+        migration_name: str,
+        parameters: _models.DatabaseMigrationCosmosDbMongo,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+        """Create or Update Database Migration resource.
+
+        :param resource_group_name: Name of the resource group that contains the resource. You can
+         obtain this value from the Azure Resource Manager API or the portal. Required.
+        :type resource_group_name: str
+        :param target_resource_name: The name of the target resource/account. Required.
+        :type target_resource_name: str
+        :param migration_name: Name of the migration. Required.
+        :type migration_name: str
+        :param parameters: Details of CosmosDB for Mongo API Migration resource. Required.
+        :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    def begin_create(
+        self,
+        resource_group_name: str,
+        target_resource_name: str,
+        migration_name: str,
+        parameters: IO[bytes],
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+        """Create or Update Database Migration resource.
+
+        :param resource_group_name: Name of the resource group that contains the resource. You can
+         obtain this value from the Azure Resource Manager API or the portal. Required.
+        :type resource_group_name: str
+        :param target_resource_name: The name of the target resource/account. Required.
+        :type target_resource_name: str
+        :param migration_name: Name of the migration. Required.
+        :type migration_name: str
+        :param parameters: Details of CosmosDB for Mongo API Migration resource. Required.
+        :type parameters: IO[bytes]
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def begin_create(
+        self,
+        resource_group_name: str,
+        target_resource_name: str,
+        migration_name: str,
+        parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]],
+        **kwargs: Any
+    ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]:
+        """Create or Update Database Migration resource.
+
+        :param resource_group_name: Name of the resource group that contains the resource. You can
+         obtain this value from the Azure Resource Manager API or the portal. Required.
+        :type resource_group_name: str
+        :param target_resource_name: The name of the target resource/account. Required.
+        :type target_resource_name: str
+        :param migration_name: Name of the migration. Required.
+        :type migration_name: str
+        :param parameters: Details of CosmosDB for Mongo API Migration resource. Is either a
+         DatabaseMigrationCosmosDbMongo type or a IO[bytes] type. Required.
+        :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo or IO[bytes]
+        :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None)
+        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+        if cont_token is None:
+            # No continuation token: issue the initial PUT now.  ``cls`` is a
+            # passthrough so the raw PipelineResponse reaches the poller.
+            raw_result = self._create_initial(
+                resource_group_name=resource_group_name,
+                target_resource_name=target_resource_name,
+                migration_name=migration_name,
+                parameters=parameters,
+                api_version=api_version,
+                content_type=content_type,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+            # Drain the streamed body before polling begins.
+            raw_result.http_response.read()  # type: ignore
+        kwargs.pop("error_map", None)
+
+        def get_long_running_output(pipeline_response):
+            # Final-state callback: deserialize the terminal response body.
+            deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})  # type: ignore
+            return deserialized
+
+        if polling is True:
+            polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
+        elif polling is False:
+            polling_method = cast(PollingMethod, NoPolling())
+        else:
+            polling_method = polling
+        if cont_token:
+            return LROPoller[_models.DatabaseMigrationCosmosDbMongo].from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output,
+            )
+        return LROPoller[_models.DatabaseMigrationCosmosDbMongo](
+            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
+        )
+
+    def _delete_initial(
+        self,
+        resource_group_name: str,
+        target_resource_name: str,
+        migration_name: str,
+        force: Optional[bool] = None,
+        **kwargs: Any
+    ) -> Iterator[bytes]:
+        # Initial DELETE of the long-running delete operation; ``begin_delete``
+        # wraps the streamed result in an LROPoller.
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+        _request = build_delete_request(
+            resource_group_name=resource_group_name,
+            target_resource_name=target_resource_name,
+            migration_name=migration_name,
+            subscription_id=self._config.subscription_id,
+            force=force,
+            api_version=api_version,
+            headers=_headers,
+            params=_params,
+        )
+        _request.url = self._client.format_url(_request.url)
+
+        _decompress = kwargs.pop("decompress", True)
+        # Stream the response so the body can be consumed lazily by the poller.
+        _stream = True
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202, 204]:
+            try:
+                response.read()  # Load the body in memory and close the socket
+            except (StreamConsumedError, StreamClosedError):
+                pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        if response.status_code == 202:
+            # Asynchronous delete: the service supplies a Location header to poll.
+            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+
+        deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace
+    def begin_delete(
+        self,
+        resource_group_name: str,
+        target_resource_name: str,
+        migration_name: str,
+        force: Optional[bool] = None,
+        **kwargs: Any
+    ) -> LROPoller[None]:
+        """Delete Database Migration resource.
+
+        :param resource_group_name: Name of the resource group that contains the resource. You can
+         obtain this value from the Azure Resource Manager API or the portal. Required.
+        :type resource_group_name: str
+        :param target_resource_name: The name of the target resource/account. Required.
+        :type target_resource_name: str
+        :param migration_name: Name of the migration. Required.
+        :type migration_name: str
+        :param force: Optional force delete boolean. If this is provided as true, migration will be
+         deleted even if active. Default value is None.
+        :type force: bool
+        :return: An instance of LROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        cls: ClsType[None] = kwargs.pop("cls", None)
+        polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+        if cont_token is None:
+            # No continuation token: issue the initial DELETE now.
+            raw_result = self._delete_initial(
+                resource_group_name=resource_group_name,
+                target_resource_name=target_resource_name,
+                migration_name=migration_name,
+                force=force,
+                api_version=api_version,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+            # Drain the streamed body before polling begins.
+            raw_result.http_response.read()  # type: ignore
+        kwargs.pop("error_map", None)
+
+        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
+            # Delete returns no body; only invoke a custom ``cls`` callback.
+            if cls:
+                return cls(pipeline_response, None, {})  # type: ignore
+
+        if polling is True:
+            polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
+        elif polling is False:
+            polling_method = cast(PollingMethod, NoPolling())
+        else:
+            polling_method = polling
+        if cont_token:
+            return LROPoller[None].from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output,
+            )
+        return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method)  # type: ignore
+
+    @distributed_trace
+    def get_for_scope(
+        self, resource_group_name: str, target_resource_name: str, **kwargs: Any
+    ) -> Iterable["_models.DatabaseMigrationCosmosDbMongo"]:
+        """Get Database Migration resources for the scope.
+
+        :param resource_group_name: Name of the resource group that contains the resource. You can
+         obtain this value from the Azure Resource Manager API or the portal. Required.
+        :type resource_group_name: str
+        :param target_resource_name: The name of the target resource/account. Required.
+        :type target_resource_name: str
+        :return: An iterator like instance of either DatabaseMigrationCosmosDbMongo or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        cls: ClsType[_models.DatabaseMigrationCosmosDbMongoListResult] = kwargs.pop("cls", None)
+
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        def prepare_request(next_link=None):
+            # First page: build the list request; later pages: follow next_link.
+            if not next_link:
+
+                _request = build_get_for_scope_request(
+                    resource_group_name=resource_group_name,
+                    target_resource_name=target_resource_name,
+                    subscription_id=self._config.subscription_id,
+                    api_version=api_version,
+                    headers=_headers,
+                    params=_params,
+                )
+                _request.url = self._client.format_url(_request.url)
+
+            else:
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                # Re-encode the query parameters carried by the service-supplied
+                # next link, then force the client's configured api-version.
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                _request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                _request.url = self._client.format_url(_request.url)
+                # Method is already "GET" from the constructor above; this
+                # reassignment is kept from the generated template.
+                _request.method = "GET"
+            return _request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (next_link, items) for ItemPaged.
+            deserialized = self._deserialize("DatabaseMigrationCosmosDbMongoListResult", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)  # type: ignore
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Fetch a single page, mapping error statuses to typed exceptions.
+            _request = prepare_request(next_link)
+
+            _stream = False
+            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+                _request, stream=_stream, **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+        return ItemPaged(get_next, extract_data)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_db_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_db_operations.py
index 29e046dbd4fe..b03b887c4d41 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_db_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_db_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterator, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,12 +15,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -28,12 +29,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -54,9 +54,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -71,7 +69,7 @@ def build_get_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
if migration_operation_id is not None:
@@ -92,9 +90,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -110,7 +106,7 @@ def build_create_or_update_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -134,9 +130,7 @@ def build_delete_request(
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
# Construct URL
_url = kwargs.pop(
"template_url",
@@ -149,7 +143,7 @@ def build_delete_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
if force is not None:
@@ -165,9 +159,7 @@ def build_cancel_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
# Construct URL
_url = kwargs.pop(
@@ -181,7 +173,7 @@ def build_cancel_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -237,12 +229,11 @@ def get(
:type migration_operation_id: str
:param expand: Complete migration details be included in the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DatabaseMigrationSqlDb or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -253,12 +244,10 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -266,15 +255,14 @@ def get(
migration_operation_id=migration_operation_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -283,26 +271,22 @@ def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return deserialized # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlDb, IO],
+ parameters: Union[_models.DatabaseMigrationSqlDb, IO[bytes]],
**kwargs: Any
- ) -> _models.DatabaseMigrationSqlDb:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -313,21 +297,19 @@ def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DatabaseMigrationSqlDb")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -336,38 +318,34 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
-
@overload
def begin_create_or_update(
self,
@@ -393,14 +371,6 @@ def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DatabaseMigrationSqlDb or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb]
@@ -413,7 +383,7 @@ def begin_create_or_update(
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -428,18 +398,10 @@ def begin_create_or_update(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Details of Sql Db migration resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DatabaseMigrationSqlDb or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb]
@@ -452,7 +414,7 @@ def begin_create_or_update(
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlDb, IO],
+ parameters: Union[_models.DatabaseMigrationSqlDb, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.DatabaseMigrationSqlDb]:
"""Create or Update Database Migration resource.
@@ -464,20 +426,9 @@ def begin_create_or_update(
:type sql_db_instance_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Details of Sql Db migration resource. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of Sql Db migration resource. Is either a DatabaseMigrationSqlDb
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb or IO[bytes]
:return: An instance of LROPoller that returns either DatabaseMigrationSqlDb or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb]
@@ -486,9 +437,7 @@ def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -507,12 +456,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -522,27 +472,25 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.DatabaseMigrationSqlDb].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return LROPoller[_models.DatabaseMigrationSqlDb](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self,
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
force: Optional[bool] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -553,41 +501,43 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
subscription_id=self._config.subscription_id,
force=force,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -610,14 +560,6 @@ def begin_delete(
:param force: Optional force delete boolean. If this is provided as true, migration will be
deleted even if active. Default value is None.
:type force: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -625,15 +567,13 @@ def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -644,11 +584,12 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -657,27 +598,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _cancel_initial( # pylint: disable=inconsistent-return-statements
+ def _cancel_initial(
self,
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -688,21 +625,19 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cancel_request(
+ _request = build_cancel_request(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -711,29 +646,33 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cancel_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cancel_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return deserialized # type: ignore
@overload
def begin_cancel(
@@ -761,14 +700,6 @@ def begin_cancel(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -780,7 +711,7 @@ def begin_cancel(
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -796,18 +727,10 @@ def begin_cancel(
:type target_db_name: str
:param parameters: Required migration operation ID for which cancel will be initiated.
Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -819,7 +742,7 @@ def begin_cancel(
resource_group_name: str,
sql_db_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Stop on going migration for the database.
@@ -832,19 +755,8 @@ def begin_cancel(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required migration operation ID for which cancel will be initiated. Is
- either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ either a MigrationOperationInput type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -852,16 +764,14 @@ def begin_cancel(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._cancel_initial( # type: ignore
+ raw_result = self._cancel_initial(
resource_group_name=resource_group_name,
sql_db_instance_name=sql_db_instance_name,
target_db_name=target_db_name,
@@ -873,11 +783,12 @@ def begin_cancel(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -886,14 +797,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_mi_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_mi_operations.py
index 165b7c702c25..640f784047bc 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_mi_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_mi_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterator, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,12 +15,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -28,12 +29,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -54,9 +54,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -71,7 +69,7 @@ def build_get_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
if migration_operation_id is not None:
@@ -92,9 +90,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -110,7 +106,7 @@ def build_create_or_update_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -129,9 +125,7 @@ def build_cancel_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
# Construct URL
_url = kwargs.pop(
@@ -145,7 +139,7 @@ def build_cancel_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -163,9 +157,7 @@ def build_cutover_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
# Construct URL
_url = kwargs.pop(
@@ -179,7 +171,7 @@ def build_cutover_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -235,12 +227,11 @@ def get(
:type migration_operation_id: str
:param expand: Complete migration details be included in the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DatabaseMigrationSqlMi or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -251,12 +242,10 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -264,15 +253,14 @@ def get(
migration_operation_id=migration_operation_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -281,26 +269,22 @@ def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return deserialized # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlMi, IO],
+ parameters: Union[_models.DatabaseMigrationSqlMi, IO[bytes]],
**kwargs: Any
- ) -> _models.DatabaseMigrationSqlMi:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -311,21 +295,19 @@ def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DatabaseMigrationSqlMi")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -334,38 +316,34 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
-
@overload
def begin_create_or_update(
self,
@@ -391,14 +369,6 @@ def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DatabaseMigrationSqlMi or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi]
@@ -411,7 +381,7 @@ def begin_create_or_update(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -426,18 +396,10 @@ def begin_create_or_update(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DatabaseMigrationSqlMi or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi]
@@ -450,7 +412,7 @@ def begin_create_or_update(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlMi, IO],
+ parameters: Union[_models.DatabaseMigrationSqlMi, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.DatabaseMigrationSqlMi]:
"""Create a new database migration to a given SQL Managed Instance.
@@ -462,20 +424,9 @@ def begin_create_or_update(
:type managed_instance_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of SqlMigrationService resource. Is either a DatabaseMigrationSqlMi
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi or IO[bytes]
:return: An instance of LROPoller that returns either DatabaseMigrationSqlMi or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi]
@@ -484,9 +435,7 @@ def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -505,12 +454,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -520,27 +470,25 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.DatabaseMigrationSqlMi].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return LROPoller[_models.DatabaseMigrationSqlMi](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _cancel_initial( # pylint: disable=inconsistent-return-statements
+ def _cancel_initial(
self,
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -551,21 +499,19 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cancel_request(
+ _request = build_cancel_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -574,29 +520,33 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cancel_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cancel_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return deserialized # type: ignore
@overload
def begin_cancel(
@@ -624,14 +574,6 @@ def begin_cancel(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -643,7 +585,7 @@ def begin_cancel(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -659,18 +601,10 @@ def begin_cancel(
:type target_db_name: str
:param parameters: Required migration operation ID for which cancel will be initiated.
Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -682,7 +616,7 @@ def begin_cancel(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Stop in-progress database migration to SQL Managed Instance.
@@ -695,19 +629,8 @@ def begin_cancel(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required migration operation ID for which cancel will be initiated. Is
- either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ either a MigrationOperationInput type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -715,16 +638,14 @@ def begin_cancel(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._cancel_initial( # type: ignore
+ raw_result = self._cancel_initial(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -736,11 +657,12 @@ def begin_cancel(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -749,27 +671,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _cutover_initial( # pylint: disable=inconsistent-return-statements
+ def _cutover_initial(
self,
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -780,21 +698,19 @@ def _cutover_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cutover_request(
+ _request = build_cutover_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -803,29 +719,33 @@ def _cutover_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cutover_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cutover_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover"
- }
+ return deserialized # type: ignore
@overload
def begin_cutover(
@@ -853,14 +773,6 @@ def begin_cutover(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -872,7 +784,7 @@ def begin_cutover(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -888,18 +800,10 @@ def begin_cutover(
:type target_db_name: str
:param parameters: Required migration operation ID for which cutover will be initiated.
Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -911,7 +815,7 @@ def begin_cutover(
resource_group_name: str,
managed_instance_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Initiate cutover for in-progress online database migration to SQL Managed Instance.
@@ -924,19 +828,8 @@ def begin_cutover(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required migration operation ID for which cutover will be initiated. Is
- either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ either a MigrationOperationInput type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -944,16 +837,14 @@ def begin_cutover(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._cutover_initial( # type: ignore
+ raw_result = self._cutover_initial(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
target_db_name=target_db_name,
@@ -965,11 +856,12 @@ def begin_cutover(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -978,14 +870,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cutover.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_vm_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_vm_operations.py
index 73871d141414..3d2fdf61f6df 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_vm_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_vm_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterator, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,12 +15,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -28,12 +29,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -54,9 +54,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -71,7 +69,7 @@ def build_get_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
if migration_operation_id is not None:
@@ -92,9 +90,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -110,7 +106,7 @@ def build_create_or_update_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -129,9 +125,7 @@ def build_cancel_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
# Construct URL
_url = kwargs.pop(
@@ -145,7 +139,7 @@ def build_cancel_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -163,9 +157,7 @@ def build_cutover_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
# Construct URL
_url = kwargs.pop(
@@ -179,7 +171,7 @@ def build_cutover_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -235,12 +227,11 @@ def get(
:type migration_operation_id: str
:param expand: Complete migration details be included in the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DatabaseMigrationSqlVm or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -251,12 +242,10 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -264,15 +253,14 @@ def get(
migration_operation_id=migration_operation_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -281,26 +269,22 @@ def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return deserialized # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlVm, IO],
+ parameters: Union[_models.DatabaseMigrationSqlVm, IO[bytes]],
**kwargs: Any
- ) -> _models.DatabaseMigrationSqlVm:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -311,21 +295,19 @@ def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DatabaseMigrationSqlVm")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -334,38 +316,34 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
-
@overload
def begin_create_or_update(
self,
@@ -391,14 +369,6 @@ def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DatabaseMigrationSqlVm or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm]
@@ -411,7 +381,7 @@ def begin_create_or_update(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -426,18 +396,10 @@ def begin_create_or_update(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DatabaseMigrationSqlVm or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm]
@@ -450,7 +412,7 @@ def begin_create_or_update(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.DatabaseMigrationSqlVm, IO],
+ parameters: Union[_models.DatabaseMigrationSqlVm, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.DatabaseMigrationSqlVm]:
"""Create a new database migration to a given SQL VM.
@@ -462,20 +424,9 @@ def begin_create_or_update(
:type sql_virtual_machine_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of SqlMigrationService resource. Is either a DatabaseMigrationSqlVm
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm or IO[bytes]
:return: An instance of LROPoller that returns either DatabaseMigrationSqlVm or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm]
@@ -484,9 +435,7 @@ def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -505,12 +454,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response)
+ deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -520,27 +470,25 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.DatabaseMigrationSqlVm].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}"
- }
+ return LROPoller[_models.DatabaseMigrationSqlVm](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _cancel_initial( # pylint: disable=inconsistent-return-statements
+ def _cancel_initial(
self,
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -551,21 +499,19 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cancel_request(
+ _request = build_cancel_request(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -574,29 +520,33 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cancel_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cancel_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return deserialized # type: ignore
@overload
def begin_cancel(
@@ -623,14 +573,6 @@ def begin_cancel(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -642,7 +584,7 @@ def begin_cancel(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -657,18 +599,10 @@ def begin_cancel(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -680,7 +614,7 @@ def begin_cancel(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Stop in-progress database migration to SQL VM.
@@ -692,19 +626,8 @@ def begin_cancel(
:type sql_virtual_machine_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Is either a MigrationOperationInput type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -712,16 +635,14 @@ def begin_cancel(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._cancel_initial( # type: ignore
+ raw_result = self._cancel_initial(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -733,11 +654,12 @@ def begin_cancel(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -746,27 +668,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _cutover_initial( # pylint: disable=inconsistent-return-statements
+ def _cutover_initial(
self,
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -777,21 +695,19 @@ def _cutover_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MigrationOperationInput")
- request = build_cutover_request(
+ _request = build_cutover_request(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -800,29 +716,33 @@ def _cutover_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._cutover_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _cutover_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover"
- }
+ return deserialized # type: ignore
@overload
def begin_cutover(
@@ -849,14 +769,6 @@ def begin_cutover(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -868,7 +780,7 @@ def begin_cutover(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -883,18 +795,10 @@ def begin_cutover(
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
:param parameters: Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -906,7 +810,7 @@ def begin_cutover(
resource_group_name: str,
sql_virtual_machine_name: str,
target_db_name: str,
- parameters: Union[_models.MigrationOperationInput, IO],
+ parameters: Union[_models.MigrationOperationInput, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Initiate cutover for in-progress online database migration to SQL VM.
@@ -918,19 +822,8 @@ def begin_cutover(
:type sql_virtual_machine_name: str
:param target_db_name: The name of the target database. Required.
:type target_db_name: str
- :param parameters: Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Is either a MigrationOperationInput type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -938,16 +831,14 @@ def begin_cutover(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._cutover_initial( # type: ignore
+ raw_result = self._cutover_initial(
resource_group_name=resource_group_name,
sql_virtual_machine_name=sql_virtual_machine_name,
target_db_name=target_db_name,
@@ -959,11 +850,12 @@ def begin_cutover(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -972,14 +864,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_cutover.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_files_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_files_operations.py
index 4f5b912e5f55..9c7ab6c9b143 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_files_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_files_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,20 +20,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -47,9 +45,7 @@ def build_list_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -64,7 +60,7 @@ def build_list_request(
"projectName": _SERIALIZER.url("project_name", project_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -81,9 +77,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -99,7 +93,7 @@ def build_get_request(
"fileName": _SERIALIZER.url("file_name", file_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -116,9 +110,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -135,7 +127,7 @@ def build_create_or_update_request(
"fileName": _SERIALIZER.url("file_name", file_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -154,9 +146,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -172,7 +162,7 @@ def build_delete_request(
"fileName": _SERIALIZER.url("file_name", file_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -189,9 +179,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -208,7 +196,7 @@ def build_update_request(
"fileName": _SERIALIZER.url("file_name", file_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -227,9 +215,7 @@ def build_read_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -245,7 +231,7 @@ def build_read_request(
"fileName": _SERIALIZER.url("file_name", file_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -262,9 +248,7 @@ def build_read_write_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -280,7 +264,7 @@ def build_read_write_request(
"fileName": _SERIALIZER.url("file_name", file_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -325,7 +309,6 @@ def list(
:type service_name: str
:param project_name: Name of the project. Required.
:type project_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProjectFile or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.ProjectFile]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -333,12 +316,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FileList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -349,18 +330,16 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -372,13 +351,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("FileList", pipeline_response)
@@ -388,10 +366,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -404,10 +383,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files"
- }
-
@distributed_trace
def get(
self, group_name: str, service_name: str, project_name: str, file_name: str, **kwargs: Any
@@ -425,12 +400,11 @@ def get(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -441,27 +415,24 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
file_name=file_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -471,16 +442,12 @@ def get(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectFile", pipeline_response)
+ deserialized = self._deserialize("ProjectFile", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}"
- }
+ return deserialized # type: ignore
@overload
def create_or_update(
@@ -511,7 +478,6 @@ def create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
@@ -524,7 +490,7 @@ def create_or_update(
service_name: str,
project_name: str,
file_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -542,11 +508,10 @@ def create_or_update(
:param file_name: Name of the File. Required.
:type file_name: str
:param parameters: Information about the file. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
@@ -559,7 +524,7 @@ def create_or_update(
service_name: str,
project_name: str,
file_name: str,
- parameters: Union[_models.ProjectFile, IO],
+ parameters: Union[_models.ProjectFile, IO[bytes]],
**kwargs: Any
) -> _models.ProjectFile:
"""Create a file resource.
@@ -574,17 +539,14 @@ def create_or_update(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :param parameters: Information about the file. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the file. Is either a ProjectFile type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO[bytes]
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -595,21 +557,19 @@ def create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectFile")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -619,15 +579,14 @@ def create_or_update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -637,21 +596,13 @@ def create_or_update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("ProjectFile", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("ProjectFile", pipeline_response)
+ deserialized = self._deserialize("ProjectFile", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}"
- }
-
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self, group_name: str, service_name: str, project_name: str, file_name: str, **kwargs: Any
@@ -668,12 +619,11 @@ def delete( # pylint: disable=inconsistent-return-statements
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -684,27 +634,24 @@ def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
file_name=file_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -715,11 +662,7 @@ def delete( # pylint: disable=inconsistent-return-statements
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}"
- }
+ return cls(pipeline_response, None, {}) # type: ignore
@overload
def update(
@@ -750,7 +693,6 @@ def update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
@@ -763,7 +705,7 @@ def update(
service_name: str,
project_name: str,
file_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -781,11 +723,10 @@ def update(
:param file_name: Name of the File. Required.
:type file_name: str
:param parameters: Information about the file. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
@@ -798,7 +739,7 @@ def update(
service_name: str,
project_name: str,
file_name: str,
- parameters: Union[_models.ProjectFile, IO],
+ parameters: Union[_models.ProjectFile, IO[bytes]],
**kwargs: Any
) -> _models.ProjectFile:
"""Update a file.
@@ -813,17 +754,14 @@ def update(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :param parameters: Information about the file. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the file. Is either a ProjectFile type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO[bytes]
:return: ProjectFile or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectFile
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -834,21 +772,19 @@ def update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectFile")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -858,15 +794,14 @@ def update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -876,16 +811,12 @@ def update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectFile", pipeline_response)
+ deserialized = self._deserialize("ProjectFile", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def read(
@@ -904,12 +835,11 @@ def read(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: FileStorageInfo or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.FileStorageInfo
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -920,27 +850,24 @@ def read(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FileStorageInfo] = kwargs.pop("cls", None)
- request = build_read_request(
+ _request = build_read_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
file_name=file_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.read.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -950,16 +877,12 @@ def read(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("FileStorageInfo", pipeline_response)
+ deserialized = self._deserialize("FileStorageInfo", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- read.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}/read"
- }
+ return deserialized # type: ignore
@distributed_trace
def read_write(
@@ -977,12 +900,11 @@ def read_write(
:type project_name: str
:param file_name: Name of the File. Required.
:type file_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: FileStorageInfo or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.FileStorageInfo
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -993,27 +915,24 @@ def read_write(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.FileStorageInfo] = kwargs.pop("cls", None)
- request = build_read_write_request(
+ _request = build_read_write_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
file_name=file_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.read_write.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1023,13 +942,9 @@ def read_write(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("FileStorageInfo", pipeline_response)
+ deserialized = self._deserialize("FileStorageInfo", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- read_write.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}/readwrite"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_migration_services_operations.py
new file mode 100644
index 000000000000..30dbc1ce1e2d
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_migration_services_operations.py
@@ -0,0 +1,1080 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
+import urllib.parse
+
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models as _models
+from .._serialization import Serializer
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_get_request(
+ resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "migrationServiceName": _SERIALIZER.url(
+ "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_create_or_update_request(
+ resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "migrationServiceName": _SERIALIZER.url(
+ "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_delete_request(
+ resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "migrationServiceName": _SERIALIZER.url(
+ "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_update_request(
+ resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "migrationServiceName": _SERIALIZER.url(
+ "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/migrationServices"
+ )
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_list_migrations_request(
+ resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}/listMigrations",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
+ "migrationServiceName": _SERIALIZER.url(
+ "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class MigrationServicesOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.mgmt.datamigration.DataMigrationManagementClient`'s
+ :attr:`migration_services` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs):
+ input_args = list(args)
+ self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace
+ def get(self, resource_group_name: str, migration_service_name: str, **kwargs: Any) -> _models.MigrationService:
+ """Retrieve the Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :return: MigrationService or the result of cls(response)
+ :rtype: ~azure.mgmt.datamigration.models.MigrationService
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None)
+
+ _request = build_get_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize("MigrationService", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: Union[_models.MigrationService, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "MigrationService")
+
+ _request = build_create_or_update_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: _models.MigrationService,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.MigrationService]:
+ """Create or Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationService
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.MigrationService]:
+ """Create or Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: Union[_models.MigrationService, IO[bytes]],
+ **kwargs: Any
+ ) -> LROPoller[_models.MigrationService]:
+ """Create or Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Is either a MigrationService type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationService or IO[bytes]
+ :return: An instance of LROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("MigrationService", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[_models.MigrationService].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[_models.MigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ def _delete_initial(self, resource_group_name: str, migration_service_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace
+ def begin_delete(self, resource_group_name: str, migration_service_name: str, **kwargs: Any) -> LROPoller[None]:
+ """Delete Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ api_version=api_version,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ if polling is True:
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
+ def _update_initial(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: Union[_models.MigrationServiceUpdate, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "MigrationServiceUpdate")
+
+ _request = build_update_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: _models.MigrationServiceUpdate,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.MigrationService]:
+ """Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationServiceUpdate
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.MigrationService]:
+ """Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_update(
+ self,
+ resource_group_name: str,
+ migration_service_name: str,
+ parameters: Union[_models.MigrationServiceUpdate, IO[bytes]],
+ **kwargs: Any
+ ) -> LROPoller[_models.MigrationService]:
+ """Update Database Migration Service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :param parameters: Details of MigrationService resource. Is either a MigrationServiceUpdate
+ type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationServiceUpdate or IO[bytes]
+ :return: An instance of LROPoller that returns either MigrationService or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._update_initial(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("MigrationService", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[_models.MigrationService].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[_models.MigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ @distributed_trace
+ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.MigrationService"]:
+ """Retrieve all migration services in the resource group.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :return: An iterator like instance of either MigrationService or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.MigrationServiceListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_list_by_resource_group_request(
+ resource_group_name=resource_group_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize("MigrationServiceListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
+
+ @distributed_trace
+ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.MigrationService"]:
+ """Retrieve all migration services in the subscription.
+
+ :return: An iterator like instance of either MigrationService or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.MigrationService]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.MigrationServiceListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_list_by_subscription_request(
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize("MigrationServiceListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
+
+ @distributed_trace
+ def list_migrations(
+ self, resource_group_name: str, migration_service_name: str, **kwargs: Any
+ ) -> Iterable["_models.DatabaseMigrationBase"]:
+ """Retrieve the list of database migrations attached to the service.
+
+ :param resource_group_name: Name of the resource group that contains the resource. You can
+ obtain this value from the Azure Resource Manager API or the portal. Required.
+ :type resource_group_name: str
+ :param migration_service_name: Name of the Migration Service. Required.
+ :type migration_service_name: str
+ :return: An iterator like instance of either DatabaseMigrationBase or the result of
+ cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationBase]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.DatabaseMigrationBaseListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_list_migrations_request(
+ resource_group_name=resource_group_name,
+ migration_service_name=migration_service_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize("DatabaseMigrationBaseListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_operations.py
index f30e8f2f8534..ab2fb05a5b55 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,20 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -45,9 +42,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -85,7 +80,6 @@ def __init__(self, *args, **kwargs):
def list(self, **kwargs: Any) -> Iterable["_models.OperationsDefinition"]:
"""Lists all of the available SQL Migration REST API operations.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationsDefinition or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.OperationsDefinition]
@@ -94,12 +88,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.OperationsDefinition"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -110,14 +102,12 @@ def list(self, **kwargs: Any) -> Iterable["_models.OperationsDefinition"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -129,13 +119,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
@@ -145,10 +134,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -159,5 +149,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list.metadata = {"url": "/providers/Microsoft.DataMigration/operations"}
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_projects_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_projects_operations.py
index d52fdf5ad40f..9fae1ac71f3d 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_projects_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_projects_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,20 +20,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -45,9 +43,7 @@ def build_list_request(group_name: str, service_name: str, subscription_id: str,
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -61,7 +57,7 @@ def build_list_request(group_name: str, service_name: str, subscription_id: str,
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -78,9 +74,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -96,7 +90,7 @@ def build_create_or_update_request(
"projectName": _SERIALIZER.url("project_name", project_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -115,9 +109,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -132,7 +124,7 @@ def build_get_request(
"projectName": _SERIALIZER.url("project_name", project_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -155,9 +147,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -172,7 +162,7 @@ def build_delete_request(
"projectName": _SERIALIZER.url("project_name", project_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -191,9 +181,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -209,7 +197,7 @@ def build_update_request(
"projectName": _SERIALIZER.url("project_name", project_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -252,7 +240,6 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> Iterable["_
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Project or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.Project]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -260,12 +247,10 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> Iterable["_
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -276,17 +261,15 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> Iterable["_
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -298,13 +281,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("ProjectList", pipeline_response)
@@ -314,10 +296,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -330,10 +313,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects"
- }
-
@overload
def create_or_update(
self,
@@ -361,7 +340,6 @@ def create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
@@ -373,7 +351,7 @@ def create_or_update(
group_name: str,
service_name: str,
project_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -390,11 +368,10 @@ def create_or_update(
:param project_name: Name of the project. Required.
:type project_name: str
:param parameters: Information about the project. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
@@ -406,7 +383,7 @@ def create_or_update(
group_name: str,
service_name: str,
project_name: str,
- parameters: Union[_models.Project, IO],
+ parameters: Union[_models.Project, IO[bytes]],
**kwargs: Any
) -> _models.Project:
"""Create or update project.
@@ -420,18 +397,14 @@ def create_or_update(
:type service_name: str
:param project_name: Name of the project. Required.
:type project_name: str
- :param parameters: Information about the project. Is either a model type or a IO type.
+ :param parameters: Information about the project. Is either a Project type or a IO[bytes] type.
Required.
- :type parameters: ~azure.mgmt.datamigration.models.Project or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :type parameters: ~azure.mgmt.datamigration.models.Project or IO[bytes]
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -442,21 +415,19 @@ def create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Project] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "Project")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -465,15 +436,14 @@ def create_or_update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -483,21 +453,13 @@ def create_or_update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("Project", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("Project", pipeline_response)
+ deserialized = self._deserialize("Project", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}"
- }
-
@distributed_trace
def get(self, group_name: str, service_name: str, project_name: str, **kwargs: Any) -> _models.Project:
"""Get project information.
@@ -511,12 +473,11 @@ def get(self, group_name: str, service_name: str, project_name: str, **kwargs: A
:type service_name: str
:param project_name: Name of the project. Required.
:type project_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -527,26 +488,23 @@ def get(self, group_name: str, service_name: str, project_name: str, **kwargs: A
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.Project] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -556,16 +514,12 @@ def get(self, group_name: str, service_name: str, project_name: str, **kwargs: A
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Project", pipeline_response)
+ deserialized = self._deserialize("Project", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
@@ -590,12 +544,11 @@ def delete( # pylint: disable=inconsistent-return-statements
:param delete_running_tasks: Delete the resource even if it contains running tasks. Default
value is None.
:type delete_running_tasks: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -606,27 +559,24 @@ def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
subscription_id=self._config.subscription_id,
delete_running_tasks=delete_running_tasks,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -637,11 +587,7 @@ def delete( # pylint: disable=inconsistent-return-statements
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}"
- }
+ return cls(pipeline_response, None, {}) # type: ignore
@overload
def update(
@@ -670,7 +616,6 @@ def update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
@@ -682,7 +627,7 @@ def update(
group_name: str,
service_name: str,
project_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -699,11 +644,10 @@ def update(
:param project_name: Name of the project. Required.
:type project_name: str
:param parameters: Information about the project. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
@@ -715,7 +659,7 @@ def update(
group_name: str,
service_name: str,
project_name: str,
- parameters: Union[_models.Project, IO],
+ parameters: Union[_models.Project, IO[bytes]],
**kwargs: Any
) -> _models.Project:
"""Update project.
@@ -729,18 +673,14 @@ def update(
:type service_name: str
:param project_name: Name of the project. Required.
:type project_name: str
- :param parameters: Information about the project. Is either a model type or a IO type.
+ :param parameters: Information about the project. Is either a Project type or a IO[bytes] type.
Required.
- :type parameters: ~azure.mgmt.datamigration.models.Project or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :type parameters: ~azure.mgmt.datamigration.models.Project or IO[bytes]
:return: Project or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.Project
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -751,21 +691,19 @@ def update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Project] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "Project")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -774,15 +712,14 @@ def update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -792,13 +729,9 @@ def update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Project", pipeline_response)
+ deserialized = self._deserialize("Project", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_resource_skus_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_resource_skus_operations.py
index 83384702f205..774dfcbf0f24 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_resource_skus_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_resource_skus_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,20 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -45,9 +42,7 @@ def build_list_skus_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -56,7 +51,7 @@ def build_list_skus_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -90,9 +85,8 @@ def __init__(self, *args, **kwargs):
def list_skus(self, **kwargs: Any) -> Iterable["_models.ResourceSku"]:
"""Get supported SKUs.
- The skus action returns the list of SKUs that DMS supports.
+ The skus action returns the list of SKUs that DMS (classic) supports.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceSku or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.ResourceSku]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -100,12 +94,10 @@ def list_skus(self, **kwargs: Any) -> Iterable["_models.ResourceSku"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ResourceSkusResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -116,15 +108,13 @@ def list_skus(self, **kwargs: Any) -> Iterable["_models.ResourceSku"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_skus_request(
+ _request = build_list_skus_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_skus.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -136,13 +126,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceSkusResult", pipeline_response)
@@ -152,10 +141,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -167,5 +157,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list_skus.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/skus"}
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_service_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_service_tasks_operations.py
index 7a7fc9fdad40..b2d78152935a 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_service_tasks_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_service_tasks_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,20 +20,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -47,9 +45,7 @@ def build_list_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -63,7 +59,7 @@ def build_list_request(
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -82,9 +78,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -100,7 +94,7 @@ def build_create_or_update_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -125,9 +119,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -142,7 +134,7 @@ def build_get_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -167,9 +159,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -184,7 +174,7 @@ def build_delete_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -203,9 +193,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -221,7 +209,7 @@ def build_update_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -240,9 +228,7 @@ def build_cancel_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -257,7 +243,7 @@ def build_cancel_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -293,10 +279,10 @@ def list(
) -> Iterable["_models.ProjectTask"]:
"""Get service level tasks for a service.
- The services resource is the top-level resource that represents the Database Migration Service.
- This method returns a list of service level tasks owned by a service resource. Some tasks may
- have a status of Unknown, which indicates that an error occurred while querying the status of
- that task.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This method returns a list of service level tasks owned by a service
+ resource. Some tasks may have a status of Unknown, which indicates that an error occurred while
+ querying the status of that task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -304,7 +290,6 @@ def list(
:type service_name: str
:param task_type: Filter tasks by task type. Default value is None.
:type task_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProjectTask or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.ProjectTask]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -312,12 +297,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.TaskList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -328,18 +311,16 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
task_type=task_type,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -351,13 +332,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("TaskList", pipeline_response)
@@ -367,10 +347,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -383,10 +364,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks"
- }
-
@overload
def create_or_update(
self,
@@ -401,9 +378,9 @@ def create_or_update(
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PUT method creates a new service task or updates an existing one, although
- since service tasks have no mutable custom properties, there is little reason to update an
- existing one.
+ DMS (classic) instance. The PUT method creates a new service task or updates an existing one,
+ although since service tasks have no mutable custom properties, there is little reason to
+ update an existing one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -416,7 +393,6 @@ def create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -428,7 +404,7 @@ def create_or_update(
group_name: str,
service_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -436,9 +412,9 @@ def create_or_update(
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PUT method creates a new service task or updates an existing one, although
- since service tasks have no mutable custom properties, there is little reason to update an
- existing one.
+ DMS (classic) instance. The PUT method creates a new service task or updates an existing one,
+ although since service tasks have no mutable custom properties, there is little reason to
+ update an existing one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -447,11 +423,10 @@ def create_or_update(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Information about the task. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -463,15 +438,15 @@ def create_or_update(
group_name: str,
service_name: str,
task_name: str,
- parameters: Union[_models.ProjectTask, IO],
+ parameters: Union[_models.ProjectTask, IO[bytes]],
**kwargs: Any
) -> _models.ProjectTask:
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PUT method creates a new service task or updates an existing one, although
- since service tasks have no mutable custom properties, there is little reason to update an
- existing one.
+ DMS (classic) instance. The PUT method creates a new service task or updates an existing one,
+ although since service tasks have no mutable custom properties, there is little reason to
+ update an existing one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -479,17 +454,14 @@ def create_or_update(
:type service_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Information about the task. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes]
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -500,21 +472,19 @@ def create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectTask")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
@@ -523,15 +493,14 @@ def create_or_update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -541,21 +510,13 @@ def create_or_update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("ProjectTask", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}"
- }
-
@distributed_trace
def get(
self, group_name: str, service_name: str, task_name: str, expand: Optional[str] = None, **kwargs: Any
@@ -563,7 +524,7 @@ def get(
"""Get service task information.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The GET method retrieves information about a service task.
+ DMS (classic) instance. The GET method retrieves information about a service task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -573,12 +534,11 @@ def get(
:type task_name: str
:param expand: Expand the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -589,27 +549,24 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -619,16 +576,12 @@ def get(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
@@ -642,7 +595,8 @@ def delete( # pylint: disable=inconsistent-return-statements
"""Delete service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The DELETE method deletes a service task, canceling it first if it's running.
+ DMS (classic) instance. The DELETE method deletes a service task, canceling it first if it's
+ running.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -653,12 +607,11 @@ def delete( # pylint: disable=inconsistent-return-statements
:param delete_running_tasks: Delete the resource even if it contains running tasks. Default
value is None.
:type delete_running_tasks: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -669,27 +622,24 @@ def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
subscription_id=self._config.subscription_id,
delete_running_tasks=delete_running_tasks,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -700,11 +650,7 @@ def delete( # pylint: disable=inconsistent-return-statements
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}"
- }
+ return cls(pipeline_response, None, {}) # type: ignore
@overload
def update(
@@ -720,8 +666,8 @@ def update(
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PATCH method updates an existing service task, but since service tasks have
- no mutable custom properties, there is little reason to do so.
+ DMS (classic) instance. The PATCH method updates an existing service task, but since service
+ tasks have no mutable custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -734,7 +680,6 @@ def update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -746,7 +691,7 @@ def update(
group_name: str,
service_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -754,8 +699,8 @@ def update(
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PATCH method updates an existing service task, but since service tasks have
- no mutable custom properties, there is little reason to do so.
+ DMS (classic) instance. The PATCH method updates an existing service task, but since service
+ tasks have no mutable custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -764,11 +709,10 @@ def update(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Information about the task. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -780,14 +724,14 @@ def update(
group_name: str,
service_name: str,
task_name: str,
- parameters: Union[_models.ProjectTask, IO],
+ parameters: Union[_models.ProjectTask, IO[bytes]],
**kwargs: Any
) -> _models.ProjectTask:
"""Create or update service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. The PATCH method updates an existing service task, but since service tasks have
- no mutable custom properties, there is little reason to do so.
+ DMS (classic) instance. The PATCH method updates an existing service task, but since service
+ tasks have no mutable custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -795,17 +739,14 @@ def update(
:type service_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Information about the task. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes]
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -816,21 +757,19 @@ def update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectTask")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
@@ -839,15 +778,14 @@ def update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -857,23 +795,19 @@ def update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: Any) -> _models.ProjectTask:
"""Cancel a service task.
The service tasks resource is a nested, proxy-only resource representing work performed by a
- DMS instance. This method cancels a service task if it's currently queued or running.
+ DMS (classic) instance. This method cancels a service task if it's currently queued or running.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -881,12 +815,11 @@ def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: A
:type service_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -897,26 +830,23 @@ def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: A
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
- request = build_cancel_request(
+ _request = build_cancel_request(
group_name=group_name,
service_name=service_name,
task_name=task_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.cancel.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -926,13 +856,9 @@ def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: A
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}/cancel"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_services_operations.py
index baf122c6c3e6..956894cce5a0 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_services_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_services_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,12 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -49,9 +50,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -66,7 +65,7 @@ def build_create_or_update_request(
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -83,9 +82,7 @@ def build_get_request(group_name: str, service_name: str, subscription_id: str,
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -99,7 +96,7 @@ def build_get_request(group_name: str, service_name: str, subscription_id: str,
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -121,9 +118,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -137,7 +132,7 @@ def build_delete_request(
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -154,9 +149,7 @@ def build_update_request(group_name: str, service_name: str, subscription_id: st
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -171,7 +164,7 @@ def build_update_request(group_name: str, service_name: str, subscription_id: st
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -188,9 +181,7 @@ def build_check_status_request(group_name: str, service_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -204,7 +195,7 @@ def build_check_status_request(group_name: str, service_name: str, subscription_
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -219,9 +210,7 @@ def build_start_request(group_name: str, service_name: str, subscription_id: str
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -235,7 +224,7 @@ def build_start_request(group_name: str, service_name: str, subscription_id: str
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -250,9 +239,7 @@ def build_stop_request(group_name: str, service_name: str, subscription_id: str,
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -266,7 +253,7 @@ def build_stop_request(group_name: str, service_name: str, subscription_id: str,
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -281,9 +268,7 @@ def build_list_skus_request(group_name: str, service_name: str, subscription_id:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -297,7 +282,7 @@ def build_list_skus_request(group_name: str, service_name: str, subscription_id:
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -308,15 +293,13 @@ def build_list_skus_request(group_name: str, service_name: str, subscription_id:
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
-def build_check_children_name_availability_request(
+def build_check_children_name_availability_request( # pylint: disable=name-too-long
group_name: str, service_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -331,7 +314,7 @@ def build_check_children_name_availability_request(
"serviceName": _SERIALIZER.url("service_name", service_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -348,9 +331,7 @@ def build_list_by_resource_group_request(group_name: str, subscription_id: str,
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -363,7 +344,7 @@ def build_list_by_resource_group_request(group_name: str, subscription_id: str,
"groupName": _SERIALIZER.url("group_name", group_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -378,9 +359,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -389,7 +368,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -404,9 +383,7 @@ def build_check_name_availability_request(location: str, subscription_id: str, *
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -420,7 +397,7 @@ def build_check_name_availability_request(location: str, subscription_id: str, *
"location": _SERIALIZER.url("location", location, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -453,9 +430,13 @@ def __init__(self, *args, **kwargs):
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
def _create_or_update_initial(
- self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any
- ) -> Optional[_models.DataMigrationService]:
- error_map = {
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.DataMigrationService, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -466,21 +447,19 @@ def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.DataMigrationService]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DataMigrationService")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
@@ -488,39 +467,34 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return deserialized # type: ignore
@overload
def begin_create_or_update(
@@ -532,16 +506,16 @@ def begin_create_or_update(
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.DataMigrationService]:
- """Create or update DMS Instance.
+ """Create or update DMS (classic) Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PUT method creates a new service or updates an existing one. When a service is updated,
- existing child resources (i.e. tasks) are unaffected. Services currently support a single kind,
- "vm", which refers to a VM-based service, although other kinds may be added in the future. This
- method can change the kind, SKU, and network of the service, but if tasks are currently running
- (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider
- will reply when successful with 200 OK or 201 Created. Long-running operations use the
- provisioningState property.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PUT method creates a new service or updates an existing one. When a
+ service is updated, existing child resources (i.e. tasks) are unaffected. Services currently
+ support a single kind, "vm", which refers to a VM-based service, although other kinds may be
+ added in the future. This method can change the kind, SKU, and network of the service, but if
+ tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request
+ ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created.
+ Long-running operations use the provisioningState property.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -552,14 +526,6 @@ def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DataMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -571,39 +537,31 @@ def begin_create_or_update(
self,
group_name: str,
service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.DataMigrationService]:
- """Create or update DMS Instance.
+ """Create or update DMS (classic) Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PUT method creates a new service or updates an existing one. When a service is updated,
- existing child resources (i.e. tasks) are unaffected. Services currently support a single kind,
- "vm", which refers to a VM-based service, although other kinds may be added in the future. This
- method can change the kind, SKU, and network of the service, but if tasks are currently running
- (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider
- will reply when successful with 200 OK or 201 Created. Long-running operations use the
- provisioningState property.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PUT method creates a new service or updates an existing one. When a
+ service is updated, existing child resources (i.e. tasks) are unaffected. Services currently
+ support a single kind, "vm", which refers to a VM-based service, although other kinds may be
+ added in the future. This method can change the kind, SKU, and network of the service, but if
+ tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request
+ ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created.
+ Long-running operations use the provisioningState property.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
:param parameters: Information about the service. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DataMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -612,37 +570,30 @@ def begin_create_or_update(
@distributed_trace
def begin_create_or_update(
- self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.DataMigrationService, IO[bytes]],
+ **kwargs: Any
) -> LROPoller[_models.DataMigrationService]:
- """Create or update DMS Instance.
+ """Create or update DMS (classic) Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PUT method creates a new service or updates an existing one. When a service is updated,
- existing child resources (i.e. tasks) are unaffected. Services currently support a single kind,
- "vm", which refers to a VM-based service, although other kinds may be added in the future. This
- method can change the kind, SKU, and network of the service, but if tasks are currently running
- (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider
- will reply when successful with 200 OK or 201 Created. Long-running operations use the
- provisioningState property.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PUT method creates a new service or updates an existing one. When a
+ service is updated, existing child resources (i.e. tasks) are unaffected. Services currently
+ support a single kind, "vm", which refers to a VM-based service, although other kinds may be
+ added in the future. This method can change the kind, SKU, and network of the service, but if
+ tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request
+ ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created.
+ Long-running operations use the provisioningState property.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :param parameters: Information about the service. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Information about the service. Is either a DataMigrationService type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO[bytes]
:return: An instance of LROPoller that returns either DataMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -651,9 +602,7 @@ def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -671,12 +620,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -686,35 +636,32 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.DataMigrationService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return LROPoller[_models.DataMigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def get(self, group_name: str, service_name: str, **kwargs: Any) -> _models.DataMigrationService:
- """Get DMS Service Instance.
+ """Get DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The GET method retrieves information about a service instance.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The GET method retrieves information about a service instance.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DataMigrationService or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DataMigrationService
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -725,25 +672,22 @@ def get(self, group_name: str, service_name: str, **kwargs: Any) -> _models.Data
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -753,21 +697,17 @@ def get(self, group_name: str, service_name: str, **kwargs: Any) -> _models.Data
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
+ return deserialized # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
-
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self, group_name: str, service_name: str, delete_running_tasks: Optional[bool] = None, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -778,50 +718,52 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
delete_running_tasks=delete_running_tasks,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
self, group_name: str, service_name: str, delete_running_tasks: Optional[bool] = None, **kwargs: Any
) -> LROPoller[None]:
- """Delete DMS Service Instance.
+ """Delete DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The DELETE method deletes a service. Any running tasks will be canceled.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The DELETE method deletes a service. Any running tasks will be canceled.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -830,14 +772,6 @@ def begin_delete(
:param delete_running_tasks: Delete the resource even if it contains running tasks. Default
value is None.
:type delete_running_tasks: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -845,15 +779,13 @@ def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
group_name=group_name,
service_name=service_name,
delete_running_tasks=delete_running_tasks,
@@ -863,11 +795,12 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -876,22 +809,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
def _update_initial(
- self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any
- ) -> Optional[_models.DataMigrationService]:
- error_map = {
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.DataMigrationService, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -902,21 +835,19 @@ def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.DataMigrationService]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DataMigrationService")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
@@ -924,36 +855,34 @@ def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return deserialized # type: ignore
@overload
def begin_update(
@@ -965,12 +894,12 @@ def begin_update(
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.DataMigrationService]:
- """Create or update DMS Service Instance.
+ """Create or update DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PATCH method updates an existing service. This method can change the kind, SKU, and network
- of the service, but if tasks are currently running (i.e. the service is busy), this will fail
- with 400 Bad Request ("ServiceIsBusy").
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PATCH method updates an existing service. This method can change the
+ kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is
+ busy), this will fail with 400 Bad Request ("ServiceIsBusy").
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -981,14 +910,6 @@ def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DataMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -1000,35 +921,27 @@ def begin_update(
self,
group_name: str,
service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.DataMigrationService]:
- """Create or update DMS Service Instance.
+ """Create or update DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PATCH method updates an existing service. This method can change the kind, SKU, and network
- of the service, but if tasks are currently running (i.e. the service is busy), this will fail
- with 400 Bad Request ("ServiceIsBusy").
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PATCH method updates an existing service. This method can change the
+ kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is
+ busy), this will fail with 400 Bad Request ("ServiceIsBusy").
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
:param parameters: Information about the service. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either DataMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -1037,33 +950,26 @@ def begin_update(
@distributed_trace
def begin_update(
- self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.DataMigrationService, IO[bytes]],
+ **kwargs: Any
) -> LROPoller[_models.DataMigrationService]:
- """Create or update DMS Service Instance.
+ """Create or update DMS (classic) Service Instance.
- The services resource is the top-level resource that represents the Database Migration Service.
- The PATCH method updates an existing service. This method can change the kind, SKU, and network
- of the service, but if tasks are currently running (i.e. the service is busy), this will fail
- with 400 Bad Request ("ServiceIsBusy").
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). The PATCH method updates an existing service. This method can change the
+ kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is
+ busy), this will fail with 400 Bad Request ("ServiceIsBusy").
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :param parameters: Information about the service. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Information about the service. Is either a DataMigrationService type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO[bytes]
:return: An instance of LROPoller that returns either DataMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -1072,9 +978,7 @@ def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -1092,12 +996,13 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("DataMigrationService", pipeline_response)
+ deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -1107,17 +1012,15 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.DataMigrationService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}"
- }
+ return LROPoller[_models.DataMigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def check_status(
@@ -1125,20 +1028,19 @@ def check_status(
) -> _models.DataMigrationServiceStatusResponse:
"""Check service health status.
- The services resource is the top-level resource that represents the Database Migration Service.
- This action performs a health check and returns the status of the service and virtual machine
- size.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This action performs a health check and returns the status of the service
+ and virtual machine size.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DataMigrationServiceStatusResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DataMigrationServiceStatusResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1149,25 +1051,22 @@ def check_status(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataMigrationServiceStatusResponse] = kwargs.pop("cls", None)
- request = build_check_status_request(
+ _request = build_check_status_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.check_status.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1177,21 +1076,15 @@ def check_status(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DataMigrationServiceStatusResponse", pipeline_response)
+ deserialized = self._deserialize("DataMigrationServiceStatusResponse", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
+ return deserialized # type: ignore
- check_status.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkStatus"
- }
-
- def _start_initial( # pylint: disable=inconsistent-return-statements
- self, group_name: str, service_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _start_initial(self, group_name: str, service_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1202,60 +1095,55 @@ def _start_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_start_request(
+ _request = build_start_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._start_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _start_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -> LROPoller[None]:
"""Start service.
- The services resource is the top-level resource that represents the Database Migration Service.
- This action starts the service and the service can be used for data migration.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This action starts the service and the service can be used for data
+ migration.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1263,15 +1151,13 @@ def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -> LROP
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._start_initial( # type: ignore
+ raw_result = self._start_initial(
group_name=group_name,
service_name=service_name,
api_version=api_version,
@@ -1280,11 +1166,12 @@ def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -> LROP
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -1293,22 +1180,16 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- begin_start.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start"
- }
-
- def _stop_initial( # pylint: disable=inconsistent-return-statements
- self, group_name: str, service_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _stop_initial(self, group_name: str, service_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1319,61 +1200,55 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_stop_request(
+ _request = build_stop_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._stop_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _stop_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> LROPoller[None]:
"""Stop service.
- The services resource is the top-level resource that represents the Database Migration Service.
- This action stops the service and the service cannot be used for data migration. The service
- owner won't be billed when the service is stopped.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This action stops the service and the service cannot be used for data
+ migration. The service owner won't be billed when the service is stopped.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1381,15 +1256,13 @@ def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> LROPo
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._stop_initial( # type: ignore
+ raw_result = self._stop_initial(
group_name=group_name,
service_name=service_name,
api_version=api_version,
@@ -1398,11 +1271,12 @@ def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> LROPo
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -1411,30 +1285,25 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_stop.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_skus(self, group_name: str, service_name: str, **kwargs: Any) -> Iterable["_models.AvailableServiceSku"]:
"""Get compatible SKUs.
- The services resource is the top-level resource that represents the Database Migration Service.
- The skus action returns the list of SKUs that a service resource can be updated to.
+ The services resource is the top-level resource that represents the Database Migration Service
+ (classic). The skus action returns the list of SKUs that a service resource can be updated to.
:param group_name: Name of the resource group. Required.
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AvailableServiceSku or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.AvailableServiceSku]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1442,12 +1311,10 @@ def list_skus(self, group_name: str, service_name: str, **kwargs: Any) -> Iterab
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ServiceSkuList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1458,17 +1325,15 @@ def list_skus(self, group_name: str, service_name: str, **kwargs: Any) -> Iterab
def prepare_request(next_link=None):
if not next_link:
- request = build_list_skus_request(
+ _request = build_list_skus_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_skus.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1480,13 +1345,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("ServiceSkuList", pipeline_response)
@@ -1496,10 +1360,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1512,10 +1377,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_skus.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/skus"
- }
-
@overload
def check_children_name_availability(
self,
@@ -1539,7 +1400,6 @@ def check_children_name_availability(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1550,7 +1410,7 @@ def check_children_name_availability(
self,
group_name: str,
service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1564,11 +1424,10 @@ def check_children_name_availability(
:param service_name: Name of the service. Required.
:type service_name: str
:param parameters: Requested name to validate. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1576,7 +1435,11 @@ def check_children_name_availability(
@distributed_trace
def check_children_name_availability(
- self, group_name: str, service_name: str, parameters: Union[_models.NameAvailabilityRequest, IO], **kwargs: Any
+ self,
+ group_name: str,
+ service_name: str,
+ parameters: Union[_models.NameAvailabilityRequest, IO[bytes]],
+ **kwargs: Any
) -> _models.NameAvailabilityResponse:
"""Check nested resource name validity and availability.
@@ -1586,17 +1449,14 @@ def check_children_name_availability(
:type group_name: str
:param service_name: Name of the service. Required.
:type service_name: str
- :param parameters: Requested name to validate. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Requested name to validate. Is either a NameAvailabilityRequest type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO[bytes]
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1607,21 +1467,19 @@ def check_children_name_availability(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.NameAvailabilityResponse] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "NameAvailabilityRequest")
- request = build_check_children_name_availability_request(
+ _request = build_check_children_name_availability_request(
group_name=group_name,
service_name=service_name,
subscription_id=self._config.subscription_id,
@@ -1629,15 +1487,14 @@ def check_children_name_availability(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.check_children_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1647,27 +1504,22 @@ def check_children_name_availability(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response)
+ deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- check_children_name_availability.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkNameAvailability"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_by_resource_group(self, group_name: str, **kwargs: Any) -> Iterable["_models.DataMigrationService"]:
"""Get services in resource group.
- The Services resource is the top-level resource that represents the Database Migration Service.
- This method returns a list of service resources in a resource group.
+ The Services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This method returns a list of service resources in a resource group.
:param group_name: Name of the resource group. Required.
:type group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DataMigrationService or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -1676,12 +1528,10 @@ def list_by_resource_group(self, group_name: str, **kwargs: Any) -> Iterable["_m
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataMigrationServiceList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1692,16 +1542,14 @@ def list_by_resource_group(self, group_name: str, **kwargs: Any) -> Iterable["_m
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
group_name=group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1713,13 +1561,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("DataMigrationServiceList", pipeline_response)
@@ -1729,10 +1576,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1745,18 +1593,13 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services"
- }
-
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.DataMigrationService"]:
"""Get services in subscription.
- The services resource is the top-level resource that represents the Database Migration Service.
- This method returns a list of service resources in a subscription.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This method returns a list of service resources in a subscription.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DataMigrationService or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DataMigrationService]
@@ -1765,12 +1608,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.DataMigrationService"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DataMigrationServiceList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1781,15 +1622,13 @@ def list(self, **kwargs: Any) -> Iterable["_models.DataMigrationService"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1801,13 +1640,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("DataMigrationServiceList", pipeline_response)
@@ -1817,10 +1655,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1833,8 +1672,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/services"}
-
@overload
def check_name_availability(
self,
@@ -1855,7 +1692,6 @@ def check_name_availability(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1863,7 +1699,7 @@ def check_name_availability(
@overload
def check_name_availability(
- self, location: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
+ self, location: str, parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> _models.NameAvailabilityResponse:
"""Check name validity and availability.
@@ -1872,11 +1708,10 @@ def check_name_availability(
:param location: The Azure region of the operation. Required.
:type location: str
:param parameters: Requested name to validate. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1884,7 +1719,7 @@ def check_name_availability(
@distributed_trace
def check_name_availability(
- self, location: str, parameters: Union[_models.NameAvailabilityRequest, IO], **kwargs: Any
+ self, location: str, parameters: Union[_models.NameAvailabilityRequest, IO[bytes]], **kwargs: Any
) -> _models.NameAvailabilityResponse:
"""Check name validity and availability.
@@ -1892,17 +1727,14 @@ def check_name_availability(
:param location: The Azure region of the operation. Required.
:type location: str
- :param parameters: Requested name to validate. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Requested name to validate. Is either a NameAvailabilityRequest type or an
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO[bytes]
:return: NameAvailabilityResponse or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1913,36 +1745,33 @@ def check_name_availability(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.NameAvailabilityResponse] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "NameAvailabilityRequest")
- request = build_check_name_availability_request(
+ _request = build_check_name_availability_request(
location=location,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
- template_url=self.check_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1952,13 +1781,9 @@ def check_name_availability(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response)
+ deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- check_name_availability.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/checkNameAvailability"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_sql_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_sql_migration_services_operations.py
index 9fe524f5df4d..5c06149bebd1 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_sql_migration_services_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_sql_migration_services_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,12 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -49,9 +50,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -65,7 +64,7 @@ def build_get_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -82,9 +81,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -99,7 +96,7 @@ def build_create_or_update_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -117,9 +114,7 @@ def build_delete_request(
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
# Construct URL
_url = kwargs.pop(
"template_url",
@@ -131,7 +126,7 @@ def build_delete_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -145,9 +140,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -162,7 +155,7 @@ def build_update_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -179,9 +172,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -194,7 +185,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -211,9 +202,7 @@ def build_list_auth_keys_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -227,7 +216,7 @@ def build_list_auth_keys_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -244,9 +233,7 @@ def build_regenerate_auth_keys_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -261,7 +248,7 @@ def build_regenerate_auth_keys_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -280,9 +267,7 @@ def build_delete_node_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -297,7 +282,7 @@ def build_delete_node_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -316,9 +301,7 @@ def build_list_migrations_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -332,7 +315,7 @@ def build_list_migrations_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -349,9 +332,7 @@ def build_list_monitoring_data_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -365,7 +346,7 @@ def build_list_monitoring_data_request(
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -380,9 +361,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -393,7 +372,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -434,12 +413,11 @@ def get(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: SqlMigrationService or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.SqlMigrationService
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -450,25 +428,22 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -477,25 +452,21 @@ def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return deserialized # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.SqlMigrationService, IO],
+ parameters: Union[_models.SqlMigrationService, IO[bytes]],
**kwargs: Any
- ) -> _models.SqlMigrationService:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -506,21 +477,19 @@ def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "SqlMigrationService")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
@@ -528,38 +497,34 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
-
@overload
def begin_create_or_update(
self,
@@ -582,14 +547,6 @@ def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService]
@@ -601,7 +558,7 @@ def begin_create_or_update(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -614,18 +571,10 @@ def begin_create_or_update(
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService]
@@ -637,7 +586,7 @@ def begin_create_or_update(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.SqlMigrationService, IO],
+ parameters: Union[_models.SqlMigrationService, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.SqlMigrationService]:
"""Create or Update Database Migration Service.
@@ -647,20 +596,9 @@ def begin_create_or_update(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationService or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of SqlMigrationService resource. Is either a SqlMigrationService
+ type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationService or IO[bytes]
:return: An instance of LROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService]
@@ -669,9 +607,7 @@ def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -689,12 +625,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -704,22 +641,20 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.SqlMigrationService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return LROPoller[_models.SqlMigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -730,39 +665,41 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -773,14 +710,6 @@ def begin_delete(self, resource_group_name: str, sql_migration_service_name: str
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -788,15 +717,13 @@ def begin_delete(self, resource_group_name: str, sql_migration_service_name: str
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
api_version=api_version,
@@ -805,11 +732,12 @@ def begin_delete(self, resource_group_name: str, sql_migration_service_name: str
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -818,26 +746,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
def _update_initial(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.SqlMigrationServiceUpdate, IO],
+ parameters: Union[_models.SqlMigrationServiceUpdate, IO[bytes]],
**kwargs: Any
- ) -> _models.SqlMigrationService:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -848,21 +772,19 @@ def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "SqlMigrationServiceUpdate")
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
@@ -870,38 +792,34 @@ def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
-
@overload
def begin_update(
self,
@@ -924,14 +842,6 @@ def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService]
@@ -943,7 +853,7 @@ def begin_update(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -956,18 +866,10 @@ def begin_update(
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService]
@@ -979,7 +881,7 @@ def begin_update(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.SqlMigrationServiceUpdate, IO],
+ parameters: Union[_models.SqlMigrationServiceUpdate, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.SqlMigrationService]:
"""Update Database Migration Service.
@@ -989,20 +891,9 @@ def begin_update(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationServiceUpdate or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Details of SqlMigrationService resource. Is either a
+ SqlMigrationServiceUpdate type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationServiceUpdate or IO[bytes]
:return: An instance of LROPoller that returns either SqlMigrationService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService]
@@ -1011,9 +902,7 @@ def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -1031,12 +920,13 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("SqlMigrationService", pipeline_response)
+ deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -1046,17 +936,15 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.SqlMigrationService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}"
- }
+ return LROPoller[_models.SqlMigrationService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def list_by_resource_group(
@@ -1067,7 +955,6 @@ def list_by_resource_group(
:param resource_group_name: Name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SqlMigrationService or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.SqlMigrationService]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1075,12 +962,10 @@ def list_by_resource_group(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.SqlMigrationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1091,16 +976,14 @@ def list_by_resource_group(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1112,13 +995,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("SqlMigrationListResult", pipeline_response)
@@ -1128,10 +1010,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1143,10 +1026,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices"
- }
-
@distributed_trace
def list_auth_keys(
self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any
@@ -1158,12 +1037,11 @@ def list_auth_keys(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: AuthenticationKeys or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.AuthenticationKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1174,25 +1052,22 @@ def list_auth_keys(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AuthenticationKeys] = kwargs.pop("cls", None)
- request = build_list_auth_keys_request(
+ _request = build_list_auth_keys_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_auth_keys.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1201,16 +1076,12 @@ def list_auth_keys(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("AuthenticationKeys", pipeline_response)
+ deserialized = self._deserialize("AuthenticationKeys", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- list_auth_keys.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listAuthKeys"
- }
+ return deserialized # type: ignore
@overload
def regenerate_auth_keys(
@@ -1234,7 +1105,6 @@ def regenerate_auth_keys(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: RegenAuthKeys or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1245,7 +1115,7 @@ def regenerate_auth_keys(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1258,11 +1128,10 @@ def regenerate_auth_keys(
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: RegenAuthKeys or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1273,7 +1142,7 @@ def regenerate_auth_keys(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.RegenAuthKeys, IO],
+ parameters: Union[_models.RegenAuthKeys, IO[bytes]],
**kwargs: Any
) -> _models.RegenAuthKeys:
"""Regenerate a new set of Authentication Keys for Self Hosted Integration Runtime.
@@ -1283,18 +1152,14 @@ def regenerate_auth_keys(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.RegenAuthKeys or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Details of SqlMigrationService resource. Is either a RegenAuthKeys type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.RegenAuthKeys or IO[bytes]
:return: RegenAuthKeys or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1305,21 +1170,19 @@ def regenerate_auth_keys(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.RegenAuthKeys] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "RegenAuthKeys")
- request = build_regenerate_auth_keys_request(
+ _request = build_regenerate_auth_keys_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
@@ -1327,15 +1190,14 @@ def regenerate_auth_keys(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.regenerate_auth_keys.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1344,16 +1206,12 @@ def regenerate_auth_keys(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("RegenAuthKeys", pipeline_response)
+ deserialized = self._deserialize("RegenAuthKeys", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- regenerate_auth_keys.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/regenerateAuthKeys"
- }
+ return deserialized # type: ignore
@overload
def delete_node(
@@ -1377,7 +1235,6 @@ def delete_node(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DeleteNode or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DeleteNode
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1388,7 +1245,7 @@ def delete_node(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1401,11 +1258,10 @@ def delete_node(
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
:param parameters: Details of SqlMigrationService resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: DeleteNode or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DeleteNode
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1416,7 +1272,7 @@ def delete_node(
self,
resource_group_name: str,
sql_migration_service_name: str,
- parameters: Union[_models.DeleteNode, IO],
+ parameters: Union[_models.DeleteNode, IO[bytes]],
**kwargs: Any
) -> _models.DeleteNode:
"""Delete the integration runtime node.
@@ -1426,18 +1282,14 @@ def delete_node(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.DeleteNode or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Details of SqlMigrationService resource. Is either a DeleteNode type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.DeleteNode or IO[bytes]
:return: DeleteNode or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.DeleteNode
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1448,21 +1300,19 @@ def delete_node(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DeleteNode] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "DeleteNode")
- request = build_delete_node_request(
+ _request = build_delete_node_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
@@ -1470,15 +1320,14 @@ def delete_node(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.delete_node.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1487,16 +1336,12 @@ def delete_node(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("DeleteNode", pipeline_response)
+ deserialized = self._deserialize("DeleteNode", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- delete_node.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/deleteNode"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_migrations(
@@ -1509,7 +1354,6 @@ def list_migrations(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DatabaseMigration or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DatabaseMigration]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1517,12 +1361,10 @@ def list_migrations(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.DatabaseMigrationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1533,17 +1375,15 @@ def list_migrations(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_migrations_request(
+ _request = build_list_migrations_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_migrations.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1555,13 +1395,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("DatabaseMigrationListResult", pipeline_response)
@@ -1571,10 +1410,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1586,10 +1426,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_migrations.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listMigrations"
- }
-
@distributed_trace
def list_monitoring_data(
self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any
@@ -1602,12 +1438,11 @@ def list_monitoring_data(
:type resource_group_name: str
:param sql_migration_service_name: Name of the SQL Migration Service. Required.
:type sql_migration_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: IntegrationRuntimeMonitoringData or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.IntegrationRuntimeMonitoringData
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1618,25 +1453,22 @@ def list_monitoring_data(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.IntegrationRuntimeMonitoringData] = kwargs.pop("cls", None)
- request = build_list_monitoring_data_request(
+ _request = build_list_monitoring_data_request(
resource_group_name=resource_group_name,
sql_migration_service_name=sql_migration_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_monitoring_data.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1645,22 +1477,17 @@ def list_monitoring_data(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response)
+ deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- list_monitoring_data.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listMonitoringData"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.SqlMigrationService"]:
"""Retrieve all SQL migration services in the subscriptions.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SqlMigrationService or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.SqlMigrationService]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1668,12 +1495,10 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.SqlMigrationS
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.SqlMigrationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1684,15 +1509,13 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.SqlMigrationS
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1704,13 +1527,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("SqlMigrationListResult", pipeline_response)
@@ -1720,10 +1542,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1734,7 +1557,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list_by_subscription.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/sqlMigrationServices"
- }
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_tasks_operations.py
index 1816624493cb..c2bb11c53a9e 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_tasks_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_tasks_operations.py
@@ -6,6 +6,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,20 +21,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -53,9 +52,7 @@ def build_list_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -70,7 +67,7 @@ def build_list_request(
"projectName": _SERIALIZER.url("project_name", project_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -89,9 +86,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -108,7 +103,7 @@ def build_create_or_update_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -134,9 +129,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -152,7 +145,7 @@ def build_get_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -178,9 +171,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -196,7 +187,7 @@ def build_delete_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -215,9 +206,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -234,7 +223,7 @@ def build_update_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -253,9 +242,7 @@ def build_cancel_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -271,7 +258,7 @@ def build_cancel_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -288,9 +275,7 @@ def build_command_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -307,7 +292,7 @@ def build_command_request(
"taskName": _SERIALIZER.url("task_name", task_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -345,9 +330,10 @@ def list(
) -> Iterable["_models.ProjectTask"]:
"""Get tasks in a service.
- The services resource is the top-level resource that represents the Database Migration Service.
- This method returns a list of tasks owned by a service resource. Some tasks may have a status
- of Unknown, which indicates that an error occurred while querying the status of that task.
+ The services resource is the top-level resource that represents the Azure Database Migration
+ Service (classic). This method returns a list of tasks owned by a service resource. Some tasks
+ may have a status of Unknown, which indicates that an error occurred while querying the status
+ of that task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -357,7 +343,6 @@ def list(
:type project_name: str
:param task_type: Filter tasks by task type. Default value is None.
:type task_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProjectTask or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.ProjectTask]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -365,12 +350,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.TaskList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -381,19 +364,17 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
subscription_id=self._config.subscription_id,
task_type=task_type,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -405,13 +386,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("TaskList", pipeline_response)
@@ -421,10 +401,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -437,10 +418,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks"
- }
-
@overload
def create_or_update(
self,
@@ -456,8 +433,9 @@ def create_or_update(
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PUT method creates a new task or updates an existing one, although since tasks
- have no mutable custom properties, there is little reason to update an existing one.
+ (classic) instance. The PUT method creates a new task or updates an existing one, although
+ since tasks have no mutable custom properties, there is little reason to update an existing
+ one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -472,7 +450,6 @@ def create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -485,7 +462,7 @@ def create_or_update(
service_name: str,
project_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -493,8 +470,9 @@ def create_or_update(
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PUT method creates a new task or updates an existing one, although since tasks
- have no mutable custom properties, there is little reason to update an existing one.
+ (classic) instance. The PUT method creates a new task or updates an existing one, although
+ since tasks have no mutable custom properties, there is little reason to update an existing
+ one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -505,11 +483,10 @@ def create_or_update(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Information about the task. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -522,14 +499,15 @@ def create_or_update(
service_name: str,
project_name: str,
task_name: str,
- parameters: Union[_models.ProjectTask, IO],
+ parameters: Union[_models.ProjectTask, IO[bytes]],
**kwargs: Any
) -> _models.ProjectTask:
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PUT method creates a new task or updates an existing one, although since tasks
- have no mutable custom properties, there is little reason to update an existing one.
+ (classic) instance. The PUT method creates a new task or updates an existing one, although
+ since tasks have no mutable custom properties, there is little reason to update an existing
+ one.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -539,17 +517,14 @@ def create_or_update(
:type project_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Information about the task. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes]
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -560,21 +535,19 @@ def create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectTask")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -584,15 +557,14 @@ def create_or_update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -602,21 +574,13 @@ def create_or_update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("ProjectTask", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}"
- }
-
@distributed_trace
def get(
self,
@@ -630,7 +594,7 @@ def get(
"""Get task information.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The GET method retrieves information about a task.
+ (classic) instance. The GET method retrieves information about a task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -642,12 +606,11 @@ def get(
:type task_name: str
:param expand: Expand the response. Default value is None.
:type expand: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -658,12 +621,10 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -671,15 +632,14 @@ def get(
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -689,16 +649,12 @@ def get(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
@@ -713,7 +669,7 @@ def delete( # pylint: disable=inconsistent-return-statements
"""Delete task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The DELETE method deletes a task, canceling it first if it's running.
+ (classic) instance. The DELETE method deletes a task, canceling it first if it's running.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -726,12 +682,11 @@ def delete( # pylint: disable=inconsistent-return-statements
:param delete_running_tasks: Delete the resource even if it contains running tasks. Default
value is None.
:type delete_running_tasks: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -742,12 +697,10 @@ def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -755,15 +708,14 @@ def delete( # pylint: disable=inconsistent-return-statements
subscription_id=self._config.subscription_id,
delete_running_tasks=delete_running_tasks,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -774,11 +726,7 @@ def delete( # pylint: disable=inconsistent-return-statements
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}"
- }
+ return cls(pipeline_response, None, {}) # type: ignore
@overload
def update(
@@ -795,8 +743,8 @@ def update(
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PATCH method updates an existing task, but since tasks have no mutable custom
- properties, there is little reason to do so.
+ (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable
+ custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -811,7 +759,6 @@ def update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -824,7 +771,7 @@ def update(
service_name: str,
project_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -832,8 +779,8 @@ def update(
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PATCH method updates an existing task, but since tasks have no mutable custom
- properties, there is little reason to do so.
+ (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable
+ custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -844,11 +791,10 @@ def update(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Information about the task. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
@@ -861,14 +807,14 @@ def update(
service_name: str,
project_name: str,
task_name: str,
- parameters: Union[_models.ProjectTask, IO],
+ parameters: Union[_models.ProjectTask, IO[bytes]],
**kwargs: Any
) -> _models.ProjectTask:
"""Create or update task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. The PATCH method updates an existing task, but since tasks have no mutable custom
- properties, there is little reason to do so.
+ (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable
+ custom properties, there is little reason to do so.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -878,17 +824,14 @@ def update(
:type project_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Information about the task. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes]
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -899,21 +842,19 @@ def update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ProjectTask")
- request = build_update_request(
+ _request = build_update_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -923,15 +864,14 @@ def update(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -941,16 +881,12 @@ def update(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def cancel(
@@ -959,7 +895,7 @@ def cancel(
"""Cancel a task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. This method cancels a task if it's currently queued or running.
+ (classic) instance. This method cancels a task if it's currently queued or running.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -969,12 +905,11 @@ def cancel(
:type project_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ProjectTask or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.ProjectTask
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -985,27 +920,24 @@ def cancel(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None)
- request = build_cancel_request(
+ _request = build_cancel_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
task_name=task_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.cancel.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1015,16 +947,12 @@ def cancel(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ProjectTask", pipeline_response)
+ deserialized = self._deserialize("ProjectTask", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- cancel.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/cancel"
- }
+ return deserialized # type: ignore
@overload
def command(
@@ -1041,7 +969,7 @@ def command(
"""Execute a command on a task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. This method executes a command on a running task.
+ (classic) instance. This method executes a command on a running task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -1056,7 +984,6 @@ def command(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CommandProperties or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.CommandProperties
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1069,7 +996,7 @@ def command(
service_name: str,
project_name: str,
task_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1077,7 +1004,7 @@ def command(
"""Execute a command on a task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. This method executes a command on a running task.
+ (classic) instance. This method executes a command on a running task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -1088,11 +1015,10 @@ def command(
:param task_name: Name of the Task. Required.
:type task_name: str
:param parameters: Command to execute. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CommandProperties or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.CommandProperties
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1105,13 +1031,13 @@ def command(
service_name: str,
project_name: str,
task_name: str,
- parameters: Union[_models.CommandProperties, IO],
+ parameters: Union[_models.CommandProperties, IO[bytes]],
**kwargs: Any
) -> _models.CommandProperties:
"""Execute a command on a task.
The tasks resource is a nested, proxy-only resource representing work performed by a DMS
- instance. This method executes a command on a running task.
+ (classic) instance. This method executes a command on a running task.
:param group_name: Name of the resource group. Required.
:type group_name: str
@@ -1121,17 +1047,14 @@ def command(
:type project_name: str
:param task_name: Name of the Task. Required.
:type task_name: str
- :param parameters: Command to execute. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.datamigration.models.CommandProperties or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Command to execute. Is either a CommandProperties type or a IO[bytes] type.
+ Required.
+ :type parameters: ~azure.mgmt.datamigration.models.CommandProperties or IO[bytes]
:return: CommandProperties or the result of cls(response)
:rtype: ~azure.mgmt.datamigration.models.CommandProperties
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1142,21 +1065,19 @@ def command(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.CommandProperties] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "CommandProperties")
- request = build_command_request(
+ _request = build_command_request(
group_name=group_name,
service_name=service_name,
project_name=project_name,
@@ -1166,15 +1087,14 @@ def command(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.command.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1184,13 +1104,9 @@ def command(
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("CommandProperties", pipeline_response)
+ deserialized = self._deserialize("CommandProperties", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- command.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/command"
- }
+ return deserialized # type: ignore
diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_usages_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_usages_operations.py
index 7fc08418ab7c..9927407d1a03 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_usages_operations.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_usages_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,20 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -45,9 +42,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", "2022-03-30-preview")
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -59,7 +54,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht
"location": _SERIALIZER.url("location", location, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -93,12 +88,11 @@ def __init__(self, *args, **kwargs):
def list(self, location: str, **kwargs: Any) -> Iterable["_models.Quota"]:
"""Get resource quotas and usage information.
- This method returns region-specific quotas and resource usage information for the Database
- Migration Service.
+ This method returns region-specific quotas and resource usage information for the Azure
+ Database Migration Service (classic).
:param location: The Azure region of the operation. Required.
:type location: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Quota or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.Quota]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -106,12 +100,10 @@ def list(self, location: str, **kwargs: Any) -> Iterable["_models.Quota"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: Literal["2022-03-30-preview"] = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- )
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.QuotaList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -122,16 +114,14 @@ def list(self, location: str, **kwargs: Any) -> Iterable["_models.Quota"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
location=location,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -143,13 +133,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("QuotaList", pipeline_response)
@@ -159,10 +148,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
+ _stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -174,7 +164,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/usages"
- }
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_max.py
new file mode 100644
index 000000000000..56efdd67fd45
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_max.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python cosmos_db_mongo_create_or_update_database_migration_max.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_create(
+ resource_group_name="testrg",
+ target_resource_name="targetCosmosDbClusterName",
+ migration_name="migrationRequest",
+ parameters={
+ "properties": {
+ "collectionList": [
+ {
+ "sourceCollection": "sourceCol1",
+ "sourceDatabase": "sourceDb1",
+ "targetCollection": "targetCol1",
+ "targetDatabase": "targetDb1",
+ },
+ {
+ "sourceCollection": "sourceCol2",
+ "sourceDatabase": "sourceDb2",
+ "targetCollection": "sourceCol2",
+ "targetDatabase": "sourceDb2",
+ },
+ ],
+ "kind": "MongoToCosmosDbMongo",
+ "migrationService": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DataMigration/MigrationServices/testMigrationService",
+ "scope": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DocumentDB/mongoClusters/targetCosmosDbClusterName",
+ "sourceMongoConnection": {
+ "host": "abc.mongodb.com",
+ "password": "placeholder",
+ "port": 88,
+ "useSsl": True,
+ "userName": "abc",
+ },
+ "targetMongoConnection": {
+ "host": "xyz.mongocluster.cosmos.azure.com",
+ "password": "placeholder",
+ "port": 10255,
+ "useSsl": True,
+ "userName": "def",
+ },
+ }
+ },
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CosmosDbMongoCreateOrUpdateDatabaseMigrationMAX.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_min.py
new file mode 100644
index 000000000000..c71638545a9b
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_min.py
@@ -0,0 +1,67 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python cosmos_db_mongo_create_or_update_database_migration_min.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_create(
+ resource_group_name="testrg",
+ target_resource_name="targetCosmosDbClusterName",
+ migration_name="migrationRequest",
+ parameters={
+ "properties": {
+ "collectionList": [
+ {
+ "sourceCollection": "sourceCol1",
+ "sourceDatabase": "sourceDb1",
+ "targetCollection": "targetCol1",
+ "targetDatabase": "targetDb1",
+ },
+ {"sourceCollection": "sourceCol2", "sourceDatabase": "sourceDb2"},
+ ],
+ "kind": "MongoToCosmosDbMongo",
+ "migrationService": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DataMigration/MigrationServices/testMigrationService",
+ "scope": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DocumentDB/mongoClusters/targetCosmosDbClusterName",
+ "sourceMongoConnection": {
+ "host": "abc.mongodb.com",
+ "password": "placeholder",
+ "port": 88,
+ "useSsl": True,
+ "userName": "abc",
+ },
+ "targetMongoConnection": {"connectionString": "placeholder"},
+ }
+ },
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CosmosDbMongoCreateOrUpdateDatabaseMigrationMIN.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_delete_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_delete_database_migration.py
new file mode 100644
index 000000000000..e94cb3bb3a60
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_delete_database_migration.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python cosmos_db_mongo_delete_database_migration.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_delete(
+ resource_group_name="testrg",
+ target_resource_name="targetCosmosDbClusterName",
+ migration_name="migrationRequest",
+ ).result()
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CosmosDbMongoDeleteDatabaseMigration.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration.py
new file mode 100644
index 000000000000..c3f0a97edbd5
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python cosmos_db_mongo_get_database_migration.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get(
+ resource_group_name="testrg",
+ target_resource_name="targetCosmosDbClusterName",
+ migration_name="migrationRequest",
+ )
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CosmosDbMongoGetDatabaseMigration.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration_expanded.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration_expanded.py
new file mode 100644
index 000000000000..bb2a6f29e4f7
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration_expanded.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python cosmos_db_mongo_get_database_migration_expanded.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get(
+ resource_group_name="testrg",
+ target_resource_name="targetCosmosDbClusterName",
+ migration_name="migrationRequest",
+ )
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CosmosDbMongoGetDatabaseMigrationExpanded.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_list_by_scope_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_list_by_scope_database_migration.py
new file mode 100644
index 000000000000..bd1471c601ba
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_list_by_scope_database_migration.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python cosmos_db_mongo_list_by_scope_database_migration.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get_for_scope(
+ resource_group_name="testrg",
+ target_resource_name="targetCosmosDbClusterName",
+ )
+ for item in response:
+ print(item)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CosmosDbMongoListByScopeDatabaseMigration.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_max.py
index bcd243168839..3fb97a41f500 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_max.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_max.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,14 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.sql_migration_services.begin_create_or_update(
+ response = client.migration_services.begin_create_or_update(
resource_group_name="testrg",
- sql_migration_service_name="testagent",
+ migration_service_name="testagent",
parameters={"location": "northeurope"},
).result()
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/CreateOrUpdateMigrationServiceMAX.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CreateOrUpdateMigrationServiceMAX.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_min.py
index 47c4bec2b286..7bf748205446 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_min.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_min.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,14 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.sql_migration_services.begin_create_or_update(
+ response = client.migration_services.begin_create_or_update(
resource_group_name="testrg",
- sql_migration_service_name="testagent",
+ migration_service_name="testagent",
parameters={"location": "northeurope"},
).result()
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/CreateOrUpdateMigrationServiceMIN.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CreateOrUpdateMigrationServiceMIN.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_max.py
new file mode 100644
index 000000000000..f0d9a6d15fe7
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_max.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python create_or_update_sql_migration_service_max.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.sql_migration_services.begin_create_or_update(
+ resource_group_name="testrg",
+ sql_migration_service_name="testagent",
+ parameters={"location": "northeurope"},
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CreateOrUpdateSqlMigrationServiceMAX.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_min.py
new file mode 100644
index 000000000000..8bc40720aadf
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_min.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python create_or_update_sql_migration_service_min.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.sql_migration_services.begin_create_or_update(
+ resource_group_name="testrg",
+ sql_migration_service_name="testagent",
+ parameters={"location": "northeurope"},
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/CreateOrUpdateSqlMigrationServiceMIN.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_integration_runtime_node.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_integration_runtime_node.py
index f28ecf4b8645..6fd097f52cf6 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_integration_runtime_node.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_integration_runtime_node.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/DeleteIntegrationRuntimeNode.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/DeleteIntegrationRuntimeNode.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_migration_service.py
index 40d496e06f36..fa845d5b724a 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,13 +30,12 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.sql_migration_services.begin_delete(
+ client.migration_services.begin_delete(
resource_group_name="testrg",
- sql_migration_service_name="service1",
+ migration_service_name="service1",
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/DeleteMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/DeleteMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_sql_migration_service.py
new file mode 100644
index 000000000000..e4d4359c0ab5
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_sql_migration_service.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python delete_sql_migration_service.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ client.sql_migration_services.begin_delete(
+ resource_group_name="testrg",
+ sql_migration_service_name="service1",
+ ).result()
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/DeleteSqlMigrationService.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_create_or_update.py
index ff8e4e8f45e9..0ba89c2890d2 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_create_or_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_create_or_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -39,6 +40,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_CreateOrUpdate.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Files_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_delete.py
index 5c09a638dd25..7f68e360ba46 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_delete.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f",
)
- response = client.files.delete(
+ client.files.delete(
group_name="DmsSdkRg",
service_name="DmsSdkService",
project_name="DmsSdkProject",
file_name="x114d023d8",
)
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_Delete.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Files_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_get.py
index c3bc13aad6f2..c1f02f80ce3e 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_get.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_Get.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Files_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_list.py
index 7fda5829c8be..b7f0efdd1312 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_list.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_List.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Files_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read.py
index ad44996b9b8d..a530404bc592 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_Read.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Files_Read.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read_write.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read_write.py
index 0882266264f0..53a15c860795 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read_write.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read_write.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_ReadWrite.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Files_ReadWrite.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_update.py
index 0133391e1b05..82c2cc215702 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -39,6 +40,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_Update.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Files_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_migration_service.py
index 07f4eee706f2..9c05f684020a 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,13 +30,13 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.sql_migration_services.get(
+ response = client.migration_services.get(
resource_group_name="testrg",
- sql_migration_service_name="service1",
+ migration_service_name="service1",
)
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/GetMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/GetMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_sql_migration_service.py
similarity index 90%
rename from sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_migration_service.py
rename to sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_sql_migration_service.py
index fbb3a8baa4d5..5b1a4b711a7a 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_sql_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -14,7 +15,7 @@
pip install azure-identity
pip install azure-mgmt-datamigration
# USAGE
- python get_monitor_data_migration_service.py
+ python get_monitor_data_sql_migration_service.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/GetMonitorDataMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/GetMonitorDataSqlMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_sql_migration_service.py
new file mode 100644
index 000000000000..753c60295017
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_sql_migration_service.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python get_sql_migration_service.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.sql_migration_services.get(
+ resource_group_name="testrg",
+ sql_migration_service_name="service1",
+ )
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/GetSqlMigrationService.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_sql_migration_service.py
similarity index 90%
rename from sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_migration_service.py
rename to sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_sql_migration_service.py
index 910ed8e27a59..4f111380b83c 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_sql_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -14,7 +15,7 @@
pip install azure-identity
pip install azure-mgmt-datamigration
# USAGE
- python list_auth_keys_migration_service.py
+ python list_auth_keys_sql_migration_service.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListAuthKeysMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ListAuthKeysSqlMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_migration_service.py
index 65b788d1261c..b8b44d73b6a2 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,13 +30,13 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.sql_migration_services.list_by_resource_group(
+ response = client.migration_services.list_by_resource_group(
resource_group_name="testrg",
)
for item in response:
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListByResourceGroupMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ListByResourceGroupMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_sql_migration_service.py
new file mode 100644
index 000000000000..d2e5923624c8
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_sql_migration_service.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python list_by_resource_group_sql_migration_service.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.sql_migration_services.list_by_resource_group(
+ resource_group_name="testrg",
+ )
+ for item in response:
+ print(item)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ListByResourceGroupSqlMigrationService.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_migration_service.py
index 767b6790181f..79ce6f3b976b 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,11 +30,11 @@ def main():
subscription_id="subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.Sql/managedInstances/managedInstance1",
)
- response = client.sql_migration_services.list_by_subscription()
+ response = client.migration_services.list_by_subscription()
for item in response:
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListBySubscriptionMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ListBySubscriptionMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_sql_migration_service.py
new file mode 100644
index 000000000000..f165e0e01e11
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_sql_migration_service.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python list_by_subscription_sql_migration_service.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+        subscription_id="00000000-1111-2222-3333-444444444444",  # NOTE(review): the spec example supplies a full ARM resource ID here, which would yield a malformed /subscriptions/{id}/... URL; the client expects the bare subscription GUID — confirm against the 2025-03-15-preview example JSON
+ )
+
+ response = client.sql_migration_services.list_by_subscription()
+ for item in response:
+ print(item)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ListBySubscriptionSqlMigrationService.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_migration_service.py
index d52fd7beb861..80ad350f20a6 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,14 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.sql_migration_services.list_migrations(
+ response = client.migration_services.list_migrations(
resource_group_name="testrg",
- sql_migration_service_name="service1",
+ migration_service_name="testMigrationService",
)
for item in response:
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListMigrationsByMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ListMigrationsByMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_sql_migration_service.py
new file mode 100644
index 000000000000..9fb003be45ca
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_sql_migration_service.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python list_migrations_by_sql_migration_service.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.sql_migration_services.list_migrations(
+ resource_group_name="testrg",
+ sql_migration_service_name="service1",
+ )
+ for item in response:
+ print(item)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ListMigrationsBySqlMigrationService.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_operation.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_operation.py
index 27d921da1820..9104a423251a 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_operation.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_operation.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -34,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListOperation.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ListOperation.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_create_or_update.py
index 30ecd5ad1fd4..b8915ab644be 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_create_or_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_create_or_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_CreateOrUpdate.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Projects_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_delete.py
index 2c4405dd482d..4f0f6e7b0af9 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_delete.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,14 +30,13 @@ def main():
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f",
)
- response = client.projects.delete(
+ client.projects.delete(
group_name="DmsSdkRg",
service_name="DmsSdkService",
project_name="DmsSdkProject",
)
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_Delete.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Projects_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_get.py
index b91acdb7aef3..f9dbfd67607b 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_get.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_Get.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Projects_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_list.py
index 92b2465fd146..e85418388123 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_list.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_List.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Projects_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_update.py
index f4a9af3cab5c..3cdca4b6c13d 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_Update.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Projects_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_sql_migration_service.py
similarity index 90%
rename from sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_migration_service.py
rename to sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_sql_migration_service.py
index cdbacf854d0d..6e65f1a32985 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_sql_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -14,7 +15,7 @@
pip install azure-identity
pip install azure-mgmt-datamigration
# USAGE
- python regen_auth_keys_migration_service.py
+ python regen_auth_keys_sql_migration_service.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/RegenAuthKeysMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/RegenAuthKeysSqlMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/resource_skus_list_skus.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/resource_skus_list_skus.py
index d840dbb3eb0e..4c7ab9e84413 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/resource_skus_list_skus.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/resource_skus_list_skus.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -34,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ResourceSkus_ListSkus.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ResourceSkus_ListSkus.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_cancel.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_cancel.py
index c7d9e12933f3..68e420971221 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_cancel.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_cancel.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_Cancel.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ServiceTasks_Cancel.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_create_or_update.py
index e743a0755578..6ce0d5821852 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_create_or_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_create_or_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -33,11 +34,18 @@ def main():
group_name="DmsSdkRg",
service_name="DmsSdkService",
task_name="DmsSdkTask",
- parameters={"properties": {"input": {"serverVersion": "NA"}, "taskType": "Service.Check.OCI"}},
+ parameters={
+ "properties": {
+ "input": {
+ "sourceConnectionInfo": {"port": 3306, "serverName": "localhost", "type": "MySqlConnectionInfo"}
+ },
+ "taskType": "ConnectToSource.MySql",
+ }
+ },
)
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_CreateOrUpdate.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ServiceTasks_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_delete.py
index 508f39869e75..e924f3bf8ee7 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_delete.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,14 +30,13 @@ def main():
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f",
)
- response = client.service_tasks.delete(
+ client.service_tasks.delete(
group_name="DmsSdkRg",
service_name="DmsSdkService",
task_name="DmsSdkTask",
)
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_Delete.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ServiceTasks_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_get.py
index 4c405770957c..e5e5b775d651 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_get.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_Get.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ServiceTasks_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_list.py
index 938aab05cac7..a3f196383ccd 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_list.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_List.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ServiceTasks_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_update.py
index 7c22110decf8..c26f452691fa 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -33,11 +34,18 @@ def main():
group_name="DmsSdkRg",
service_name="DmsSdkService",
task_name="DmsSdkTask",
- parameters={"properties": {"input": {"serverVersion": "NA"}, "taskType": "Service.Check.OCI"}},
+ parameters={
+ "properties": {
+ "input": {
+ "sourceConnectionInfo": {"port": 3306, "serverName": "localhost", "type": "MySqlConnectionInfo"}
+ },
+ "taskType": "ConnectToSource.MySql",
+ }
+ },
)
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_Update.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/ServiceTasks_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_children_name_availability.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_children_name_availability.py
index fb2fe8a39a50..7638bc73893a 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_children_name_availability.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_children_name_availability.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_CheckChildrenNameAvailability.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_CheckChildrenNameAvailability.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_name_availability.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_name_availability.py
index d8b50b5c39c4..2fae13973c2a 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_name_availability.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_name_availability.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_CheckNameAvailability.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_CheckNameAvailability.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_status.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_status.py
index 76d907a83761..1d2e8064532c 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_status.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_status.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_CheckStatus.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_CheckStatus.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_create_or_update.py
index 6789f1c0d2f5..f788d7564c99 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_create_or_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_create_or_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -43,6 +44,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_CreateOrUpdate.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_delete.py
index 889012e3503c..9b23a94dec43 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_delete.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,13 +30,12 @@ def main():
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f",
)
- response = client.services.begin_delete(
+ client.services.begin_delete(
group_name="DmsSdkRg",
service_name="DmsSdkService",
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Delete.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_get.py
index 690b44e4ddc1..9b90d49e610f 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_get.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Get.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list.py
index 7c445fa2f2ab..553f8919133e 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -34,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_List.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_by_resource_group.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_by_resource_group.py
index 83b6c26cf554..6f764f77e2ff 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_by_resource_group.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_by_resource_group.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -36,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_ListByResourceGroup.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_ListByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_skus.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_skus.py
index 023a71b6e58d..3863c5446e66 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_skus.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_skus.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_ListSkus.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_ListSkus.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_start.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_start.py
index e9964e6ebd6f..217135530a5b 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_start.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_start.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,13 +30,12 @@ def main():
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f",
)
- response = client.services.begin_start(
+ client.services.begin_start(
group_name="DmsSdkRg",
service_name="DmsSdkService",
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Start.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_Start.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_stop.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_stop.py
index 53872ced25d1..5105f1ce00f9 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_stop.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_stop.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,13 +30,12 @@ def main():
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f",
)
- response = client.services.begin_stop(
+ client.services.begin_stop(
group_name="DmsSdkRg",
service_name="DmsSdkService",
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Stop.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_Stop.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_update.py
index a584a3151afb..e1477fda9d5b 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -42,6 +43,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Update.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Services_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_cancel_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_cancel_database_migration.py
index 0d5c3dd7a042..7d111ea1105f 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_cancel_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_cancel_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.database_migrations_sql_db.begin_cancel(
+ client.database_migrations_sql_db.begin_cancel(
resource_group_name="testrg",
sql_db_instance_name="sqldbinstance",
target_db_name="db1",
parameters={"migrationOperationId": "9a90bb84-e70f-46f7-b0ae-1aef5b3b9f07"},
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbCancelDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlDbCancelDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_max.py
index 79fac342ea71..0004f0126426 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_max.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_max.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -62,6 +63,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbCreateOrUpdateDatabaseMigrationMAX.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlDbCreateOrUpdateDatabaseMigrationMAX.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_min.py
index ba4edfd758fc..b78d17a68859 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_min.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_min.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -61,6 +62,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbCreateOrUpdateDatabaseMigrationMIN.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlDbCreateOrUpdateDatabaseMigrationMIN.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_delete_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_delete_database_migration.py
index eb138caeef78..6ddb127fc848 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_delete_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_delete_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,14 +30,13 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.database_migrations_sql_db.begin_delete(
+ client.database_migrations_sql_db.begin_delete(
resource_group_name="testrg",
sql_db_instance_name="sqldbinstance",
target_db_name="db1",
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbDeleteDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlDbDeleteDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration.py
index a9eb1654be66..0c83a4955b72 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbGetDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlDbGetDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration_expanded.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration_expanded.py
index c956643d291e..1ce1a2e8e4ae 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration_expanded.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration_expanded.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbGetDatabaseMigrationExpanded.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlDbGetDatabaseMigrationExpanded.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cancel_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cancel_database_migration.py
index b38612880408..6690b916b065 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cancel_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cancel_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.database_migrations_sql_mi.begin_cancel(
+ client.database_migrations_sql_mi.begin_cancel(
resource_group_name="testrg",
managed_instance_name="managedInstance1",
target_db_name="db1",
parameters={"migrationOperationId": "4124fe90-d1b6-4b50-b4d9-46d02381f59a"},
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiCancelDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlMiCancelDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_blob_managed_identity.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_blob_managed_identity.py
new file mode 100644
index 000000000000..4854504b5bfb
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_blob_managed_identity.py
@@ -0,0 +1,67 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python sql_mi_create_or_update_database_migration_blob_managed_identity.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.database_migrations_sql_mi.begin_create_or_update(
+ resource_group_name="testrg",
+ managed_instance_name="managedInstance1",
+ target_db_name="db1",
+ parameters={
+ "properties": {
+ "backupConfiguration": {
+ "sourceLocation": {
+ "azureBlob": {
+ "authType": "ManagedIdentity",
+ "blobContainerName": "test",
+ "identity": {
+ "type": "UserAssigned",
+ "userAssignedIdentities": {
+ "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/testidentity": {}
+ },
+ },
+ "storageAccountResourceId": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.Storage/storageAccounts/teststorageaccount",
+ }
+ }
+ },
+ "kind": "SqlMi",
+ "migrationService": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DataMigration/sqlMigrationServices/testagent",
+ "offlineConfiguration": {"lastBackupName": "last_backup_file_name", "offline": True},
+ "scope": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.Sql/managedInstances/instance",
+ "sourceDatabaseName": "aaa",
+ }
+ },
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationBlobManagedIdentity.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_max.py
index 1202df122e28..1e04b668c5ae 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_max.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_max.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -63,6 +64,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationMAX.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationMAX.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_min.py
index 2122e311aa97..b1e6c6626dab 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_min.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_min.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -62,6 +63,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationMIN.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationMIN.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cutover_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cutover_database_migration.py
index 1d62366a62ac..b7e92e1dbdd5 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cutover_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cutover_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.database_migrations_sql_mi.begin_cutover(
+ client.database_migrations_sql_mi.begin_cutover(
resource_group_name="testrg",
managed_instance_name="managedInstance1",
target_db_name="db1",
parameters={"migrationOperationId": "4124fe90-d1b6-4b50-b4d9-46d02381f59a"},
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiCutoverDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlMiCutoverDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration.py
index 3d0943aede67..007d5bc7a80b 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiGetDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlMiGetDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration_expanded.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration_expanded.py
index d8d6bfc6684e..c8d9573c88c8 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration_expanded.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration_expanded.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiGetDatabaseMigrationExpanded.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlMiGetDatabaseMigrationExpanded.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cancel_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cancel_database_migration.py
index 1b40510df83d..881426515a18 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cancel_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cancel_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.database_migrations_sql_vm.begin_cancel(
+ client.database_migrations_sql_vm.begin_cancel(
resource_group_name="testrg",
sql_virtual_machine_name="testvm",
target_db_name="db1",
parameters={"migrationOperationId": "4124fe90-d1b6-4b50-b4d9-46d02381f59a"},
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmCancelDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlVmCancelDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_max.py
index abe4d72474d3..731f981454aa 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_max.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_max.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -63,6 +64,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmCreateOrUpdateDatabaseMigrationMAX.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlVmCreateOrUpdateDatabaseMigrationMAX.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_min.py
index 6a86febb00f7..8ac88d3143df 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_min.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_min.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -62,6 +63,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmCreateOrUpdateDatabaseMigrationMIN.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlVmCreateOrUpdateDatabaseMigrationMIN.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cutover_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cutover_database_migration.py
index c2b759e0f9e2..7d978f3ad02c 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cutover_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cutover_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.database_migrations_sql_vm.begin_cutover(
+ client.database_migrations_sql_vm.begin_cutover(
resource_group_name="testrg",
sql_virtual_machine_name="testvm",
target_db_name="db1",
parameters={"migrationOperationId": "4124fe90-d1b6-4b50-b4d9-46d02381f59a"},
).result()
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmCutoverDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlVmCutoverDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration.py
index a0167d9a4d8b..5fa236274f4d 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmGetDatabaseMigration.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlVmGetDatabaseMigration.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration_expanded.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration_expanded.py
index 900791f99a5f..c5e328112a32 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration_expanded.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration_expanded.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmGetDatabaseMigrationExpanded.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/SqlVmGetDatabaseMigrationExpanded.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_cancel.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_cancel.py
index dec7cd587696..865ee07c4fcc 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_cancel.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_cancel.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Cancel.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Tasks_Cancel.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_command.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_command.py
index b6a9b841eddf..1f53dbca7aa0 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_command.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_command.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -39,6 +40,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Command.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Tasks_Command.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_create_or_update.py
index 1c22aa9d33ff..7f8457a47c65 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_create_or_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_create_or_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -54,6 +55,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_CreateOrUpdate.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Tasks_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_delete.py
index 08e6cff502d8..31d2e7fa00f6 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_delete.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f",
)
- response = client.tasks.delete(
+ client.tasks.delete(
group_name="DmsSdkRg",
service_name="DmsSdkService",
project_name="DmsSdkProject",
task_name="DmsSdkTask",
)
- print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Delete.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Tasks_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_get.py
index 42fffa423dba..62e82c7c28c7 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_get.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Get.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Tasks_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_list.py
index 64bfed16aff8..89af28aee536 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_list.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -38,6 +39,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_List.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Tasks_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_update.py
index 28d46d756d2b..725bbb4e27a3 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_update.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -54,6 +55,6 @@ def main():
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Update.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Tasks_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_migration_service.py
index a016018f2e5d..1650fa18c5c5 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_migration_service.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_migration_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -29,14 +30,14 @@ def main():
subscription_id="00000000-1111-2222-3333-444444444444",
)
- response = client.sql_migration_services.begin_update(
+ response = client.migration_services.begin_update(
resource_group_name="testrg",
- sql_migration_service_name="testagent",
+ migration_service_name="testagent",
parameters={"tags": {"mytag": "myval"}},
).result()
print(response)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/UpdateMigrationService.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/UpdateMigrationService.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_sql_migration_service.py
new file mode 100644
index 000000000000..f4ebe97034b2
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_sql_migration_service.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-datamigration
+# USAGE
+ python update_sql_migration_service.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DataMigrationManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-1111-2222-3333-444444444444",
+ )
+
+ response = client.sql_migration_services.begin_update(
+ resource_group_name="testrg",
+ sql_migration_service_name="testagent",
+ parameters={"tags": {"mytag": "myval"}},
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/UpdateSqlMigrationService.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/usages_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/usages_list.py
index 88a866a9e459..b4609ec105c9 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/usages_list.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/usages_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.datamigration import DataMigrationManagementClient
"""
@@ -36,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Usages_List.json
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2025-03-15-preview/examples/Usages_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/conftest.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/conftest.py
new file mode 100644
index 000000000000..aeeaf83d5dde
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/conftest.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+ test_proxy,
+ add_general_regex_sanitizer,
+ add_body_key_sanitizer,
+ add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, please avoid record sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+ datamigrationmanagement_subscription_id = os.environ.get(
+ "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+ )
+ datamigrationmanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+ datamigrationmanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+ datamigrationmanagement_client_secret = os.environ.get(
+ "AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000"
+ )
+ add_general_regex_sanitizer(
+ regex=datamigrationmanagement_subscription_id, value="00000000-0000-0000-0000-000000000000"
+ )
+ add_general_regex_sanitizer(regex=datamigrationmanagement_tenant_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=datamigrationmanagement_client_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(
+ regex=datamigrationmanagement_client_secret, value="00000000-0000-0000-0000-000000000000"
+ )
+
+ add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+ add_header_regex_sanitizer(key="Cookie", value="cookie;")
+ add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py
new file mode 100644
index 000000000000..12c19da0e97d
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py
@@ -0,0 +1,123 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsMongoToCosmosDbRUMongoOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_mongo_to_cosmos_db_ru_mongo_get(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.get(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_mongo_to_cosmos_db_ru_mongo_begin_create(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.begin_create(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ parameters={
+ "collectionList": [
+ {
+ "migrationProgressDetails": {
+ "durationInSeconds": 0,
+ "migrationError": "str",
+ "migrationStatus": "str",
+ "processedDocumentCount": 0,
+ "sourceDocumentCount": 0,
+ },
+ "sourceCollection": "str",
+ "sourceDatabase": "str",
+ "targetCollection": "str",
+ "targetDatabase": "str",
+ }
+ ],
+ "endedOn": "2020-02-20 00:00:00",
+ "id": "str",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "name": "str",
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceMongoConnection": {
+ "connectionString": "str",
+ "host": "str",
+ "password": "str",
+ "port": 0,
+ "useSsl": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "targetMongoConnection": {
+ "connectionString": "str",
+ "host": "str",
+ "password": "str",
+ "port": 0,
+ "useSsl": bool,
+ "userName": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_mongo_to_cosmos_db_ru_mongo_begin_delete(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.begin_delete(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_mongo_to_cosmos_db_ru_mongo_get_for_scope(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.get_for_scope(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations_async.py
new file mode 100644
index 000000000000..df29203a88d6
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations_async.py
@@ -0,0 +1,128 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsMongoToCosmosDbRUMongoOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_mongo_to_cosmos_db_ru_mongo_get(self, resource_group):
+ response = await self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.get(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_mongo_to_cosmos_db_ru_mongo_begin_create(self, resource_group):
+ response = await (
+ await self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.begin_create(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ parameters={
+ "collectionList": [
+ {
+ "migrationProgressDetails": {
+ "durationInSeconds": 0,
+ "migrationError": "str",
+ "migrationStatus": "str",
+ "processedDocumentCount": 0,
+ "sourceDocumentCount": 0,
+ },
+ "sourceCollection": "str",
+ "sourceDatabase": "str",
+ "targetCollection": "str",
+ "targetDatabase": "str",
+ }
+ ],
+ "endedOn": "2020-02-20 00:00:00",
+ "id": "str",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "name": "str",
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceMongoConnection": {
+ "connectionString": "str",
+ "host": "str",
+ "password": "str",
+ "port": 0,
+ "useSsl": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "targetMongoConnection": {
+ "connectionString": "str",
+ "host": "str",
+ "password": "str",
+ "port": 0,
+ "useSsl": bool,
+ "userName": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_mongo_to_cosmos_db_ru_mongo_begin_delete(self, resource_group):
+ response = await (
+ await self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.begin_delete(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_mongo_to_cosmos_db_ru_mongo_get_for_scope(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.get_for_scope(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py
new file mode 100644
index 000000000000..37d873a2ab0f
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py
@@ -0,0 +1,123 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsMongoToCosmosDbvCoreMongoOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_mongo_to_cosmos_dbv_core_mongo_get(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_mongo_to_cosmos_dbv_core_mongo_begin_create(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_create(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ parameters={
+ "collectionList": [
+ {
+ "migrationProgressDetails": {
+ "durationInSeconds": 0,
+ "migrationError": "str",
+ "migrationStatus": "str",
+ "processedDocumentCount": 0,
+ "sourceDocumentCount": 0,
+ },
+ "sourceCollection": "str",
+ "sourceDatabase": "str",
+ "targetCollection": "str",
+ "targetDatabase": "str",
+ }
+ ],
+ "endedOn": "2020-02-20 00:00:00",
+ "id": "str",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "name": "str",
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceMongoConnection": {
+ "connectionString": "str",
+ "host": "str",
+ "password": "str",
+ "port": 0,
+ "useSsl": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "targetMongoConnection": {
+ "connectionString": "str",
+ "host": "str",
+ "password": "str",
+ "port": 0,
+ "useSsl": bool,
+ "userName": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_mongo_to_cosmos_dbv_core_mongo_begin_delete(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_delete(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_mongo_to_cosmos_dbv_core_mongo_get_for_scope(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get_for_scope(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations_async.py
new file mode 100644
index 000000000000..25d9a2735fda
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations_async.py
@@ -0,0 +1,128 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsMongoToCosmosDbvCoreMongoOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_mongo_to_cosmos_dbv_core_mongo_get(self, resource_group):
+ response = await self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_mongo_to_cosmos_dbv_core_mongo_begin_create(self, resource_group):
+ response = await (
+ await self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_create(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ parameters={
+ "collectionList": [
+ {
+ "migrationProgressDetails": {
+ "durationInSeconds": 0,
+ "migrationError": "str",
+ "migrationStatus": "str",
+ "processedDocumentCount": 0,
+ "sourceDocumentCount": 0,
+ },
+ "sourceCollection": "str",
+ "sourceDatabase": "str",
+ "targetCollection": "str",
+ "targetDatabase": "str",
+ }
+ ],
+ "endedOn": "2020-02-20 00:00:00",
+ "id": "str",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "name": "str",
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceMongoConnection": {
+ "connectionString": "str",
+ "host": "str",
+ "password": "str",
+ "port": 0,
+ "useSsl": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "targetMongoConnection": {
+ "connectionString": "str",
+ "host": "str",
+ "password": "str",
+ "port": 0,
+ "useSsl": bool,
+ "userName": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_mongo_to_cosmos_dbv_core_mongo_begin_delete(self, resource_group):
+ response = await (
+ await self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_delete(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ migration_name="str",
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_mongo_to_cosmos_dbv_core_mongo_get_for_scope(self, resource_group):
+ response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get_for_scope(
+ resource_group_name=resource_group.name,
+ target_resource_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations.py
new file mode 100644
index 000000000000..ece2fb7b4908
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations.py
@@ -0,0 +1,137 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsSqlDbOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_db_get(self, resource_group):
+ response = self.client.database_migrations_sql_db.get(
+ resource_group_name=resource_group.name,
+ sql_db_instance_name="str",
+ target_db_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_db_begin_create_or_update(self, resource_group):
+ response = self.client.database_migrations_sql_db.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ sql_db_instance_name="str",
+ target_db_name="str",
+ parameters={
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "kind": "SqlDb",
+ "endedOn": "2020-02-20 00:00:00",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "migrationStatusDetails": {
+ "listOfCopyProgressDetails": [
+ {
+ "copyDuration": 0,
+ "copyStart": "2020-02-20 00:00:00",
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "parallelCopyType": "str",
+ "rowsCopied": 0,
+ "rowsRead": 0,
+ "status": "str",
+ "tableName": "str",
+ "usedParallelCopies": 0,
+ }
+ ],
+ "migrationState": "str",
+ "sqlDataCopyErrors": ["str"],
+ },
+ "offlineConfiguration": {"offline": bool},
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceDatabaseName": "str",
+ "sourceServerName": "str",
+ "sourceSqlConnection": {
+ "authentication": "str",
+ "dataSource": "str",
+ "encryptConnection": bool,
+ "password": "str",
+ "trustServerCertificate": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "tableList": ["str"],
+ "targetDatabaseCollation": "str",
+ "targetSqlConnection": {
+ "authentication": "str",
+ "dataSource": "str",
+ "encryptConnection": bool,
+ "password": "str",
+ "trustServerCertificate": bool,
+ "userName": "str",
+ },
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_db_begin_delete(self, resource_group):
+ response = self.client.database_migrations_sql_db.begin_delete(
+ resource_group_name=resource_group.name,
+ sql_db_instance_name="str",
+ target_db_name="str",
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_db_begin_cancel(self, resource_group):
+ response = self.client.database_migrations_sql_db.begin_cancel(
+ resource_group_name=resource_group.name,
+ sql_db_instance_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations_async.py
new file mode 100644
index 000000000000..6454529a6396
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations_async.py
@@ -0,0 +1,144 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsSqlDbOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_db_get(self, resource_group):
+ response = await self.client.database_migrations_sql_db.get(
+ resource_group_name=resource_group.name,
+ sql_db_instance_name="str",
+ target_db_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_db_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_db.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ sql_db_instance_name="str",
+ target_db_name="str",
+ parameters={
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "kind": "SqlDb",
+ "endedOn": "2020-02-20 00:00:00",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "migrationStatusDetails": {
+ "listOfCopyProgressDetails": [
+ {
+ "copyDuration": 0,
+ "copyStart": "2020-02-20 00:00:00",
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "parallelCopyType": "str",
+ "rowsCopied": 0,
+ "rowsRead": 0,
+ "status": "str",
+ "tableName": "str",
+ "usedParallelCopies": 0,
+ }
+ ],
+ "migrationState": "str",
+ "sqlDataCopyErrors": ["str"],
+ },
+ "offlineConfiguration": {"offline": bool},
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceDatabaseName": "str",
+ "sourceServerName": "str",
+ "sourceSqlConnection": {
+ "authentication": "str",
+ "dataSource": "str",
+ "encryptConnection": bool,
+ "password": "str",
+ "trustServerCertificate": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "tableList": ["str"],
+ "targetDatabaseCollation": "str",
+ "targetSqlConnection": {
+ "authentication": "str",
+ "dataSource": "str",
+ "encryptConnection": bool,
+ "password": "str",
+ "trustServerCertificate": bool,
+ "userName": "str",
+ },
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_db_begin_delete(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_db.begin_delete(
+ resource_group_name=resource_group.name,
+ sql_db_instance_name="str",
+ target_db_name="str",
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_db_begin_cancel(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_db.begin_cancel(
+ resource_group_name=resource_group.name,
+ sql_db_instance_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations.py
new file mode 100644
index 000000000000..a0508c0db30c
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations.py
@@ -0,0 +1,215 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsSqlMiOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_mi_get(self, resource_group):
+ response = self.client.database_migrations_sql_mi.get(
+ resource_group_name=resource_group.name,
+ managed_instance_name="str",
+ target_db_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_mi_begin_create_or_update(self, resource_group):
+ response = self.client.database_migrations_sql_mi.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ managed_instance_name="str",
+ target_db_name="str",
+ parameters={
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "kind": "SqlMi",
+ "backupConfiguration": {
+ "sourceLocation": {
+ "azureBlob": {
+ "accountKey": "str",
+ "authType": "str",
+ "blobContainerName": "str",
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "storageAccountResourceId": "str",
+ },
+ "fileShare": {"password": "str", "path": "str", "username": "str"},
+ "fileStorageType": "str",
+ },
+ "targetLocation": {"accountKey": "str", "storageAccountResourceId": "str"},
+ },
+ "endedOn": "2020-02-20 00:00:00",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "migrationStatusDetails": {
+ "activeBackupSets": [
+ {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ }
+ ],
+ "blobContainerName": "str",
+ "completeRestoreErrorMessage": "str",
+ "currentRestoringFilename": "str",
+ "fileUploadBlockingErrors": ["str"],
+ "fullBackupSetInfo": {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ },
+ "invalidFiles": ["str"],
+ "isFullBackupRestored": bool,
+ "lastRestoredBackupSetInfo": {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ },
+ "lastRestoredFilename": "str",
+ "migrationState": "str",
+ "pendingLogBackupsCount": 0,
+ "restoreBlockingReason": "str",
+ },
+ "offlineConfiguration": {"lastBackupName": "str", "offline": bool},
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceDatabaseName": "str",
+ "sourceServerName": "str",
+ "sourceSqlConnection": {
+ "authentication": "str",
+ "dataSource": "str",
+ "encryptConnection": bool,
+ "password": "str",
+ "trustServerCertificate": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "targetDatabaseCollation": "str",
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_mi_begin_cancel(self, resource_group):
+ response = self.client.database_migrations_sql_mi.begin_cancel(
+ resource_group_name=resource_group.name,
+ managed_instance_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_mi_begin_cutover(self, resource_group):
+ response = self.client.database_migrations_sql_mi.begin_cutover(
+ resource_group_name=resource_group.name,
+ managed_instance_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations_async.py
new file mode 100644
index 000000000000..7b5598e46782
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations_async.py
@@ -0,0 +1,222 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsSqlMiOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_mi_get(self, resource_group):
+ response = await self.client.database_migrations_sql_mi.get(
+ resource_group_name=resource_group.name,
+ managed_instance_name="str",
+ target_db_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_mi_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_mi.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ managed_instance_name="str",
+ target_db_name="str",
+ parameters={
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "kind": "SqlMi",
+ "backupConfiguration": {
+ "sourceLocation": {
+ "azureBlob": {
+ "accountKey": "str",
+ "authType": "str",
+ "blobContainerName": "str",
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "storageAccountResourceId": "str",
+ },
+ "fileShare": {"password": "str", "path": "str", "username": "str"},
+ "fileStorageType": "str",
+ },
+ "targetLocation": {"accountKey": "str", "storageAccountResourceId": "str"},
+ },
+ "endedOn": "2020-02-20 00:00:00",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "migrationStatusDetails": {
+ "activeBackupSets": [
+ {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ }
+ ],
+ "blobContainerName": "str",
+ "completeRestoreErrorMessage": "str",
+ "currentRestoringFilename": "str",
+ "fileUploadBlockingErrors": ["str"],
+ "fullBackupSetInfo": {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ },
+ "invalidFiles": ["str"],
+ "isFullBackupRestored": bool,
+ "lastRestoredBackupSetInfo": {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ },
+ "lastRestoredFilename": "str",
+ "migrationState": "str",
+ "pendingLogBackupsCount": 0,
+ "restoreBlockingReason": "str",
+ },
+ "offlineConfiguration": {"lastBackupName": "str", "offline": bool},
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceDatabaseName": "str",
+ "sourceServerName": "str",
+ "sourceSqlConnection": {
+ "authentication": "str",
+ "dataSource": "str",
+ "encryptConnection": bool,
+ "password": "str",
+ "trustServerCertificate": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "targetDatabaseCollation": "str",
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_mi_begin_cancel(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_mi.begin_cancel(
+ resource_group_name=resource_group.name,
+ managed_instance_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_mi_begin_cutover(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_mi.begin_cutover(
+ resource_group_name=resource_group.name,
+ managed_instance_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations.py
new file mode 100644
index 000000000000..b617523ae8e4
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations.py
@@ -0,0 +1,215 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsSqlVmOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_vm_get(self, resource_group):
+ response = self.client.database_migrations_sql_vm.get(
+ resource_group_name=resource_group.name,
+ sql_virtual_machine_name="str",
+ target_db_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_vm_begin_create_or_update(self, resource_group):
+ response = self.client.database_migrations_sql_vm.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ sql_virtual_machine_name="str",
+ target_db_name="str",
+ parameters={
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "kind": "SqlVm",
+ "backupConfiguration": {
+ "sourceLocation": {
+ "azureBlob": {
+ "accountKey": "str",
+ "authType": "str",
+ "blobContainerName": "str",
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "storageAccountResourceId": "str",
+ },
+ "fileShare": {"password": "str", "path": "str", "username": "str"},
+ "fileStorageType": "str",
+ },
+ "targetLocation": {"accountKey": "str", "storageAccountResourceId": "str"},
+ },
+ "endedOn": "2020-02-20 00:00:00",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "migrationStatusDetails": {
+ "activeBackupSets": [
+ {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ }
+ ],
+ "blobContainerName": "str",
+ "completeRestoreErrorMessage": "str",
+ "currentRestoringFilename": "str",
+ "fileUploadBlockingErrors": ["str"],
+ "fullBackupSetInfo": {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ },
+ "invalidFiles": ["str"],
+ "isFullBackupRestored": bool,
+ "lastRestoredBackupSetInfo": {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ },
+ "lastRestoredFilename": "str",
+ "migrationState": "str",
+ "pendingLogBackupsCount": 0,
+ "restoreBlockingReason": "str",
+ },
+ "offlineConfiguration": {"lastBackupName": "str", "offline": bool},
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceDatabaseName": "str",
+ "sourceServerName": "str",
+ "sourceSqlConnection": {
+ "authentication": "str",
+ "dataSource": "str",
+ "encryptConnection": bool,
+ "password": "str",
+ "trustServerCertificate": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "targetDatabaseCollation": "str",
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_vm_begin_cancel(self, resource_group):
+ response = self.client.database_migrations_sql_vm.begin_cancel(
+ resource_group_name=resource_group.name,
+ sql_virtual_machine_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_database_migrations_sql_vm_begin_cutover(self, resource_group):
+ response = self.client.database_migrations_sql_vm.begin_cutover(
+ resource_group_name=resource_group.name,
+ sql_virtual_machine_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations_async.py
new file mode 100644
index 000000000000..806cd2a98760
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations_async.py
@@ -0,0 +1,222 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementDatabaseMigrationsSqlVmOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_vm_get(self, resource_group):
+ response = await self.client.database_migrations_sql_vm.get(
+ resource_group_name=resource_group.name,
+ sql_virtual_machine_name="str",
+ target_db_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_vm_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_vm.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ sql_virtual_machine_name="str",
+ target_db_name="str",
+ parameters={
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "kind": "SqlVm",
+ "backupConfiguration": {
+ "sourceLocation": {
+ "azureBlob": {
+ "accountKey": "str",
+ "authType": "str",
+ "blobContainerName": "str",
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "storageAccountResourceId": "str",
+ },
+ "fileShare": {"password": "str", "path": "str", "username": "str"},
+ "fileStorageType": "str",
+ },
+ "targetLocation": {"accountKey": "str", "storageAccountResourceId": "str"},
+ },
+ "endedOn": "2020-02-20 00:00:00",
+ "migrationFailureError": {"code": "str", "message": "str"},
+ "migrationOperationId": "str",
+ "migrationService": "str",
+ "migrationStatus": "str",
+ "migrationStatusDetails": {
+ "activeBackupSets": [
+ {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ }
+ ],
+ "blobContainerName": "str",
+ "completeRestoreErrorMessage": "str",
+ "currentRestoringFilename": "str",
+ "fileUploadBlockingErrors": ["str"],
+ "fullBackupSetInfo": {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ },
+ "invalidFiles": ["str"],
+ "isFullBackupRestored": bool,
+ "lastRestoredBackupSetInfo": {
+ "backupFinishDate": "2020-02-20 00:00:00",
+ "backupSetId": "str",
+ "backupStartDate": "2020-02-20 00:00:00",
+ "backupType": "str",
+ "familyCount": 0,
+ "firstLSN": "str",
+ "hasBackupChecksums": bool,
+ "ignoreReasons": ["str"],
+ "isBackupRestored": bool,
+ "lastLSN": "str",
+ "listOfBackupFiles": [
+ {
+ "copyDuration": 0,
+ "copyThroughput": 0.0,
+ "dataRead": 0,
+ "dataWritten": 0,
+ "familySequenceNumber": 0,
+ "fileName": "str",
+ "status": "str",
+ "totalSize": 0,
+ }
+ ],
+ },
+ "lastRestoredFilename": "str",
+ "migrationState": "str",
+ "pendingLogBackupsCount": 0,
+ "restoreBlockingReason": "str",
+ },
+ "offlineConfiguration": {"lastBackupName": "str", "offline": bool},
+ "provisioningError": "str",
+ "provisioningState": "str",
+ "scope": "str",
+ "sourceDatabaseName": "str",
+ "sourceServerName": "str",
+ "sourceSqlConnection": {
+ "authentication": "str",
+ "dataSource": "str",
+ "encryptConnection": bool,
+ "password": "str",
+ "trustServerCertificate": bool,
+ "userName": "str",
+ },
+ "startedOn": "2020-02-20 00:00:00",
+ "targetDatabaseCollation": "str",
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_vm_begin_cancel(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_vm.begin_cancel(
+ resource_group_name=resource_group.name,
+ sql_virtual_machine_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_database_migrations_sql_vm_begin_cutover(self, resource_group):
+ response = await (
+ await self.client.database_migrations_sql_vm.begin_cutover(
+ resource_group_name=resource_group.name,
+ sql_virtual_machine_name="str",
+ target_db_name="str",
+ parameters={"migrationOperationId": "str"},
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations.py
new file mode 100644
index 000000000000..7d1aad4c8dc6
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations.py
@@ -0,0 +1,158 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementFilesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_files_list(self, resource_group):
+ response = self.client.files.list(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_files_get(self, resource_group):
+ response = self.client.files.get(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_files_create_or_update(self, resource_group):
+ response = self.client.files.create_or_update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "extension": "str",
+ "filePath": "str",
+ "lastModified": "2020-02-20 00:00:00",
+ "mediaType": "str",
+ "size": 0,
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_files_delete(self, resource_group):
+ response = self.client.files.delete(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_files_update(self, resource_group):
+ response = self.client.files.update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "extension": "str",
+ "filePath": "str",
+ "lastModified": "2020-02-20 00:00:00",
+ "mediaType": "str",
+ "size": 0,
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_files_read(self, resource_group):
+ response = self.client.files.read(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_files_read_write(self, resource_group):
+ response = self.client.files.read_write(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations_async.py
new file mode 100644
index 000000000000..7474f00d6ba5
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations_async.py
@@ -0,0 +1,159 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementFilesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_files_list(self, resource_group):
+ response = self.client.files.list(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_files_get(self, resource_group):
+ response = await self.client.files.get(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_files_create_or_update(self, resource_group):
+ response = await self.client.files.create_or_update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "extension": "str",
+ "filePath": "str",
+ "lastModified": "2020-02-20 00:00:00",
+ "mediaType": "str",
+ "size": 0,
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_files_delete(self, resource_group):
+ response = await self.client.files.delete(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_files_update(self, resource_group):
+ response = await self.client.files.update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": {
+ "extension": "str",
+ "filePath": "str",
+ "lastModified": "2020-02-20 00:00:00",
+ "mediaType": "str",
+ "size": 0,
+ },
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_files_read(self, resource_group):
+ response = await self.client.files.read(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_files_read_write(self, resource_group):
+ response = await self.client.files.read_write(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ file_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations.py
new file mode 100644
index 000000000000..bc05b93c8549
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementMigrationServicesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_migration_services_get(self, resource_group):
+ response = self.client.migration_services.get(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_migration_services_begin_create_or_update(self, resource_group):
+ response = self.client.migration_services.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ parameters={
+ "location": "str",
+ "id": "str",
+ "integrationRuntimeState": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_migration_services_begin_delete(self, resource_group):
+ response = self.client.migration_services.begin_delete(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_migration_services_begin_update(self, resource_group):
+ response = self.client.migration_services.begin_update(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ parameters={"tags": {"str": "str"}},
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_migration_services_list_by_resource_group(self, resource_group):
+ response = self.client.migration_services.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_migration_services_list_by_subscription(self, resource_group):
+ response = self.client.migration_services.list_by_subscription(
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_migration_services_list_migrations(self, resource_group):
+ response = self.client.migration_services.list_migrations(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations_async.py
new file mode 100644
index 000000000000..472d8cdfad63
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations_async.py
@@ -0,0 +1,125 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementMigrationServicesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_migration_services_get(self, resource_group):
+ response = await self.client.migration_services.get(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_migration_services_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.migration_services.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ parameters={
+ "location": "str",
+ "id": "str",
+ "integrationRuntimeState": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_migration_services_begin_delete(self, resource_group):
+ response = await (
+ await self.client.migration_services.begin_delete(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_migration_services_begin_update(self, resource_group):
+ response = await (
+ await self.client.migration_services.begin_update(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ parameters={"tags": {"str": "str"}},
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_migration_services_list_by_resource_group(self, resource_group):
+ response = self.client.migration_services.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_migration_services_list_by_subscription(self, resource_group):
+ response = self.client.migration_services.list_by_subscription(
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_migration_services_list_migrations(self, resource_group):
+ response = self.client.migration_services.list_migrations(
+ resource_group_name=resource_group.name,
+ migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations.py
new file mode 100644
index 000000000000..7f4c1ff0fec4
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_operations_list(self, resource_group):
+ response = self.client.operations.list(
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations_async.py
new file mode 100644
index 000000000000..9e0b23e37077
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations_async.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_operations_list(self, resource_group):
+ response = self.client.operations.list(
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations.py
new file mode 100644
index 000000000000..e926cf0072b6
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations.py
@@ -0,0 +1,141 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementProjectsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_projects_list(self, resource_group):
+ response = self.client.projects.list(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_projects_create_or_update(self, resource_group):
+ response = self.client.projects.create_or_update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ parameters={
+ "azureAuthenticationInfo": {
+ "appKey": "str",
+ "applicationId": "str",
+ "ignoreAzurePermissions": bool,
+ "tenantId": "str",
+ },
+ "creationTime": "2020-02-20 00:00:00",
+ "databasesInfo": [{"sourceDatabaseName": "str"}],
+ "etag": "str",
+ "id": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "sourceConnectionInfo": "connection_info",
+ "sourcePlatform": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "targetConnectionInfo": "connection_info",
+ "targetPlatform": "str",
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_projects_get(self, resource_group):
+ response = self.client.projects.get(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_projects_delete(self, resource_group):
+ response = self.client.projects.delete(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_projects_update(self, resource_group):
+ response = self.client.projects.update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ parameters={
+ "azureAuthenticationInfo": {
+ "appKey": "str",
+ "applicationId": "str",
+ "ignoreAzurePermissions": bool,
+ "tenantId": "str",
+ },
+ "creationTime": "2020-02-20 00:00:00",
+ "databasesInfo": [{"sourceDatabaseName": "str"}],
+ "etag": "str",
+ "id": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "sourceConnectionInfo": "connection_info",
+ "sourcePlatform": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "targetConnectionInfo": "connection_info",
+ "targetPlatform": "str",
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations_async.py
new file mode 100644
index 000000000000..5e226e08daa9
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations_async.py
@@ -0,0 +1,142 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementProjectsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_projects_list(self, resource_group):
+ response = self.client.projects.list(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_projects_create_or_update(self, resource_group):
+ response = await self.client.projects.create_or_update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ parameters={
+ "azureAuthenticationInfo": {
+ "appKey": "str",
+ "applicationId": "str",
+ "ignoreAzurePermissions": bool,
+ "tenantId": "str",
+ },
+ "creationTime": "2020-02-20 00:00:00",
+ "databasesInfo": [{"sourceDatabaseName": "str"}],
+ "etag": "str",
+ "id": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "sourceConnectionInfo": "connection_info",
+ "sourcePlatform": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "targetConnectionInfo": "connection_info",
+ "targetPlatform": "str",
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_projects_get(self, resource_group):
+ response = await self.client.projects.get(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_projects_delete(self, resource_group):
+ response = await self.client.projects.delete(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_projects_update(self, resource_group):
+ response = await self.client.projects.update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ parameters={
+ "azureAuthenticationInfo": {
+ "appKey": "str",
+ "applicationId": "str",
+ "ignoreAzurePermissions": bool,
+ "tenantId": "str",
+ },
+ "creationTime": "2020-02-20 00:00:00",
+ "databasesInfo": [{"sourceDatabaseName": "str"}],
+ "etag": "str",
+ "id": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "sourceConnectionInfo": "connection_info",
+ "sourcePlatform": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "targetConnectionInfo": "connection_info",
+ "targetPlatform": "str",
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations.py
new file mode 100644
index 000000000000..2c81a0c14c54
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementResourceSkusOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_resource_skus_list_skus(self, resource_group):
+ response = self.client.resource_skus.list_skus(
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations_async.py
new file mode 100644
index 000000000000..9dae94a659be
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations_async.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementResourceSkusOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_resource_skus_list_skus(self, resource_group):
+ response = self.client.resource_skus.list_skus(
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations.py
new file mode 100644
index 000000000000..9903e72648a3
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations.py
@@ -0,0 +1,126 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementServiceTasksOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_service_tasks_list(self, resource_group):
+ response = self.client.service_tasks.list(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_service_tasks_create_or_update(self, resource_group):
+ response = self.client.service_tasks.create_or_update(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": "project_task_properties",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_service_tasks_get(self, resource_group):
+ response = self.client.service_tasks.get(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_service_tasks_delete(self, resource_group):
+ response = self.client.service_tasks.delete(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_service_tasks_update(self, resource_group):
+ response = self.client.service_tasks.update(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": "project_task_properties",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_service_tasks_cancel(self, resource_group):
+ response = self.client.service_tasks.cancel(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations_async.py
new file mode 100644
index 000000000000..fae7ef806aaf
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations_async.py
@@ -0,0 +1,127 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementServiceTasksOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_service_tasks_list(self, resource_group):
+ response = self.client.service_tasks.list(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_service_tasks_create_or_update(self, resource_group):
+ response = await self.client.service_tasks.create_or_update(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": "project_task_properties",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_service_tasks_get(self, resource_group):
+ response = await self.client.service_tasks.get(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_service_tasks_delete(self, resource_group):
+ response = await self.client.service_tasks.delete(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_service_tasks_update(self, resource_group):
+ response = await self.client.service_tasks.update(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": "project_task_properties",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_service_tasks_cancel(self, resource_group):
+ response = await self.client.service_tasks.cancel(
+ group_name="str",
+ service_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations.py
new file mode 100644
index 000000000000..1c25614daee2
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations.py
@@ -0,0 +1,209 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementServicesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_begin_create_or_update(self, resource_group):
+ response = self.client.services.begin_create_or_update(
+ group_name="str",
+ service_name="str",
+ parameters={
+ "autoStopDelay": "str",
+ "deleteResourcesOnStop": bool,
+ "etag": "str",
+ "id": "str",
+ "kind": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "publicKey": "str",
+ "sku": {"capacity": 0, "family": "str", "name": "str", "size": "str", "tier": "str"},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "virtualNicId": "str",
+ "virtualSubnetId": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_get(self, resource_group):
+ response = self.client.services.get(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_begin_delete(self, resource_group):
+ response = self.client.services.begin_delete(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_begin_update(self, resource_group):
+ response = self.client.services.begin_update(
+ group_name="str",
+ service_name="str",
+ parameters={
+ "autoStopDelay": "str",
+ "deleteResourcesOnStop": bool,
+ "etag": "str",
+ "id": "str",
+ "kind": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "publicKey": "str",
+ "sku": {"capacity": 0, "family": "str", "name": "str", "size": "str", "tier": "str"},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "virtualNicId": "str",
+ "virtualSubnetId": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_check_status(self, resource_group):
+ response = self.client.services.check_status(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_begin_start(self, resource_group):
+ response = self.client.services.begin_start(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_begin_stop(self, resource_group):
+ response = self.client.services.begin_stop(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_list_skus(self, resource_group):
+ response = self.client.services.list_skus(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_check_children_name_availability(self, resource_group):
+ response = self.client.services.check_children_name_availability(
+ group_name="str",
+ service_name="str",
+ parameters={"name": "str", "type": "str"},
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_list_by_resource_group(self, resource_group):
+ response = self.client.services.list_by_resource_group(
+ group_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_list(self, resource_group):
+ response = self.client.services.list(
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_services_check_name_availability(self, resource_group):
+ response = self.client.services.check_name_availability(
+ location="str",
+ parameters={"name": "str", "type": "str"},
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations_async.py
new file mode 100644
index 000000000000..e86bbf2d5769
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations_async.py
@@ -0,0 +1,220 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementServicesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.services.begin_create_or_update(
+ group_name="str",
+ service_name="str",
+ parameters={
+ "autoStopDelay": "str",
+ "deleteResourcesOnStop": bool,
+ "etag": "str",
+ "id": "str",
+ "kind": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "publicKey": "str",
+ "sku": {"capacity": 0, "family": "str", "name": "str", "size": "str", "tier": "str"},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "virtualNicId": "str",
+ "virtualSubnetId": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_get(self, resource_group):
+ response = await self.client.services.get(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_begin_delete(self, resource_group):
+ response = await (
+ await self.client.services.begin_delete(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_begin_update(self, resource_group):
+ response = await (
+ await self.client.services.begin_update(
+ group_name="str",
+ service_name="str",
+ parameters={
+ "autoStopDelay": "str",
+ "deleteResourcesOnStop": bool,
+ "etag": "str",
+ "id": "str",
+ "kind": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "publicKey": "str",
+ "sku": {"capacity": 0, "family": "str", "name": "str", "size": "str", "tier": "str"},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "virtualNicId": "str",
+ "virtualSubnetId": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_check_status(self, resource_group):
+ response = await self.client.services.check_status(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_begin_start(self, resource_group):
+ response = await (
+ await self.client.services.begin_start(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_begin_stop(self, resource_group):
+ response = await (
+ await self.client.services.begin_stop(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_list_skus(self, resource_group):
+ response = self.client.services.list_skus(
+ group_name="str",
+ service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_check_children_name_availability(self, resource_group):
+ response = await self.client.services.check_children_name_availability(
+ group_name="str",
+ service_name="str",
+ parameters={"name": "str", "type": "str"},
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_list_by_resource_group(self, resource_group):
+ response = self.client.services.list_by_resource_group(
+ group_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_list(self, resource_group):
+ response = self.client.services.list(
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_services_check_name_availability(self, resource_group):
+ response = await self.client.services.check_name_availability(
+ location="str",
+ parameters={"name": "str", "type": "str"},
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations.py
new file mode 100644
index 000000000000..6c767c6b5e67
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations.py
@@ -0,0 +1,168 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementSqlMigrationServicesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_get(self, resource_group):
+ response = self.client.sql_migration_services.get(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_begin_create_or_update(self, resource_group):
+ response = self.client.sql_migration_services.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ parameters={
+ "id": "str",
+ "integrationRuntimeState": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_begin_delete(self, resource_group):
+ response = self.client.sql_migration_services.begin_delete(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_begin_update(self, resource_group):
+ response = self.client.sql_migration_services.begin_update(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ parameters={"tags": {"str": "str"}},
+ api_version="2025-03-15-preview",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_list_by_resource_group(self, resource_group):
+ response = self.client.sql_migration_services.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_list_auth_keys(self, resource_group):
+ response = self.client.sql_migration_services.list_auth_keys(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_regenerate_auth_keys(self, resource_group):
+ response = self.client.sql_migration_services.regenerate_auth_keys(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ parameters={"authKey1": "str", "authKey2": "str", "keyName": "str"},
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_delete_node(self, resource_group):
+ response = self.client.sql_migration_services.delete_node(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ parameters={"integrationRuntimeName": "str", "nodeName": "str"},
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_list_migrations(self, resource_group):
+ response = self.client.sql_migration_services.list_migrations(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_list_monitoring_data(self, resource_group):
+ response = self.client.sql_migration_services.list_monitoring_data(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sql_migration_services_list_by_subscription(self, resource_group):
+ response = self.client.sql_migration_services.list_by_subscription(
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations_async.py
new file mode 100644
index 000000000000..7057fd27bb37
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations_async.py
@@ -0,0 +1,175 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementSqlMigrationServicesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_get(self, resource_group):
+ response = await self.client.sql_migration_services.get(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.sql_migration_services.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ parameters={
+ "id": "str",
+ "integrationRuntimeState": "str",
+ "location": "str",
+ "name": "str",
+ "provisioningState": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_begin_delete(self, resource_group):
+ response = await (
+ await self.client.sql_migration_services.begin_delete(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_begin_update(self, resource_group):
+ response = await (
+ await self.client.sql_migration_services.begin_update(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ parameters={"tags": {"str": "str"}},
+ api_version="2025-03-15-preview",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_list_by_resource_group(self, resource_group):
+ response = self.client.sql_migration_services.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_list_auth_keys(self, resource_group):
+ response = await self.client.sql_migration_services.list_auth_keys(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_regenerate_auth_keys(self, resource_group):
+ response = await self.client.sql_migration_services.regenerate_auth_keys(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ parameters={"authKey1": "str", "authKey2": "str", "keyName": "str"},
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_delete_node(self, resource_group):
+ response = await self.client.sql_migration_services.delete_node(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ parameters={"integrationRuntimeName": "str", "nodeName": "str"},
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_list_migrations(self, resource_group):
+ response = self.client.sql_migration_services.list_migrations(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_list_monitoring_data(self, resource_group):
+ response = await self.client.sql_migration_services.list_monitoring_data(
+ resource_group_name=resource_group.name,
+ sql_migration_service_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sql_migration_services_list_by_subscription(self, resource_group):
+ response = self.client.sql_migration_services.list_by_subscription(
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations.py
new file mode 100644
index 000000000000..4062d896f072
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations.py
@@ -0,0 +1,164 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementTasksOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_tasks_list(self, resource_group):
+ response = self.client.tasks.list(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_tasks_create_or_update(self, resource_group):
+ response = self.client.tasks.create_or_update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": "project_task_properties",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_tasks_get(self, resource_group):
+ response = self.client.tasks.get(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_tasks_delete(self, resource_group):
+ response = self.client.tasks.delete(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_tasks_update(self, resource_group):
+ response = self.client.tasks.update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": "project_task_properties",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_tasks_cancel(self, resource_group):
+ response = self.client.tasks.cancel(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_tasks_command(self, resource_group):
+ response = self.client.tasks.command(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ parameters={
+ "commandType": "Migrate.SqlServer.AzureDbSqlMi.Complete",
+ "errors": [{"code": "str", "details": [...], "message": "str"}],
+ "input": {"sourceDatabaseName": "str"},
+ "output": {
+ "errors": [
+ {
+ "actionableMessage": "str",
+ "filePath": "str",
+ "hResult": 0,
+ "lineNumber": "str",
+ "message": "str",
+ "stackTrace": "str",
+ }
+ ]
+ },
+ "state": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations_async.py
new file mode 100644
index 000000000000..91a52d8ef7cc
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations_async.py
@@ -0,0 +1,165 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementTasksOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_tasks_list(self, resource_group):
+ response = self.client.tasks.list(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_tasks_create_or_update(self, resource_group):
+ response = await self.client.tasks.create_or_update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": "project_task_properties",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_tasks_get(self, resource_group):
+ response = await self.client.tasks.get(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_tasks_delete(self, resource_group):
+ response = await self.client.tasks.delete(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_tasks_update(self, resource_group):
+ response = await self.client.tasks.update(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ parameters={
+ "etag": "str",
+ "id": "str",
+ "name": "str",
+ "properties": "project_task_properties",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_tasks_cancel(self, resource_group):
+ response = await self.client.tasks.cancel(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_tasks_command(self, resource_group):
+ response = await self.client.tasks.command(
+ group_name="str",
+ service_name="str",
+ project_name="str",
+ task_name="str",
+ parameters={
+ "commandType": "Migrate.SqlServer.AzureDbSqlMi.Complete",
+ "errors": [{"code": "str", "details": [...], "message": "str"}],
+ "input": {"sourceDatabaseName": "str"},
+ "output": {
+ "errors": [
+ {
+ "actionableMessage": "str",
+ "filePath": "str",
+ "hResult": 0,
+ "lineNumber": "str",
+ "message": "str",
+ "stackTrace": "str",
+ }
+ ]
+ },
+ "state": "str",
+ },
+ api_version="2025-03-15-preview",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations.py
new file mode 100644
index 000000000000..c44814599265
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementUsagesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_usages_list(self, resource_group):
+ response = self.client.usages.list(
+ location="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations_async.py
new file mode 100644
index 000000000000..37c88820f6e3
--- /dev/null
+++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations_async.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.datamigration.aio import DataMigrationManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDataMigrationManagementUsagesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_usages_list(self, resource_group):
+ response = self.client.usages.list(
+ location="str",
+ api_version="2025-03-15-preview",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/datamigration/azure-mgmt-datamigration/sdk_packaging.toml b/sdk/datamigration/azure-mgmt-datamigration/sdk_packaging.toml
index 55a05b85c1ed..683baa22ed52 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/sdk_packaging.toml
+++ b/sdk/datamigration/azure-mgmt-datamigration/sdk_packaging.toml
@@ -2,5 +2,5 @@
package_name = "azure-mgmt-datamigration"
package_pprint_name = "Data Migration"
package_doc_id = ""
-is_stable = true
+is_stable = false
title = "DataMigrationManagementClient"
diff --git a/sdk/datamigration/azure-mgmt-datamigration/setup.py b/sdk/datamigration/azure-mgmt-datamigration/setup.py
index a08d35aa86a5..8c023ece64fa 100644
--- a/sdk/datamigration/azure-mgmt-datamigration/setup.py
+++ b/sdk/datamigration/azure-mgmt-datamigration/setup.py
@@ -1,10 +1,10 @@
#!/usr/bin/env python
-#-------------------------------------------------------------------------
+# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
-#--------------------------------------------------------------------------
+# --------------------------------------------------------------------------
import re
import os.path
@@ -16,64 +16,68 @@
PACKAGE_PPRINT_NAME = "Data Migration"
# a-b-c => a/b/c
-package_folder_path = PACKAGE_NAME.replace('-', '/')
+package_folder_path = PACKAGE_NAME.replace("-", "/")
# a-b-c => a.b.c
-namespace_name = PACKAGE_NAME.replace('-', '.')
+namespace_name = PACKAGE_NAME.replace("-", ".")
# Version extraction inspired from 'requests'
-with open(os.path.join(package_folder_path, 'version.py')
- if os.path.exists(os.path.join(package_folder_path, 'version.py'))
- else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
- version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
- fd.read(), re.MULTILINE).group(1)
+with open(
+ os.path.join(package_folder_path, "version.py")
+ if os.path.exists(os.path.join(package_folder_path, "version.py"))
+ else os.path.join(package_folder_path, "_version.py"),
+ "r",
+) as fd:
+ version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1)
if not version:
- raise RuntimeError('Cannot find version information')
+ raise RuntimeError("Cannot find version information")
-with open('README.md', encoding='utf-8') as f:
+with open("README.md", encoding="utf-8") as f:
readme = f.read()
-with open('CHANGELOG.md', encoding='utf-8') as f:
+with open("CHANGELOG.md", encoding="utf-8") as f:
changelog = f.read()
setup(
name=PACKAGE_NAME,
version=version,
- description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
- long_description=readme + '\n\n' + changelog,
- long_description_content_type='text/markdown',
- license='MIT License',
- author='Microsoft Corporation',
- author_email='azpysdkhelp@microsoft.com',
- url='https://github.com/Azure/azure-sdk-for-python',
+ description="Microsoft Azure {} Client Library for Python".format(PACKAGE_PPRINT_NAME),
+ long_description=readme + "\n\n" + changelog,
+ long_description_content_type="text/markdown",
+ license="MIT License",
+ author="Microsoft Corporation",
+ author_email="azpysdkhelp@microsoft.com",
+ url="https://github.com/Azure/azure-sdk-for-python",
keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product
classifiers=[
- 'Development Status :: 5 - Production/Stable',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 3 :: Only',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.7',
- 'Programming Language :: Python :: 3.8',
- 'Programming Language :: Python :: 3.9',
- 'Programming Language :: Python :: 3.10',
- 'Programming Language :: Python :: 3.11',
- 'License :: OSI Approved :: MIT License',
+ "Development Status :: 4 - Beta",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "License :: OSI Approved :: MIT License",
],
zip_safe=False,
- packages=find_packages(exclude=[
- 'tests',
- # Exclude packages that will be covered by PEP420 or nspkg
- 'azure',
- 'azure.mgmt',
- ]),
+ packages=find_packages(
+ exclude=[
+ "tests",
+ # Exclude packages that will be covered by PEP420 or nspkg
+ "azure",
+ "azure.mgmt",
+ ]
+ ),
include_package_data=True,
package_data={
- 'pytyped': ['py.typed'],
+ "pytyped": ["py.typed"],
},
install_requires=[
- "msrest>=0.7.1",
- "azure-common~=1.1",
- "azure-mgmt-core>=1.3.2,<2.0.0",
- "typing-extensions>=4.3.0; python_version<'3.8.0'",
+ "isodate>=0.6.1",
+ "typing-extensions>=4.6.0",
+ "azure-common>=1.1",
+ "azure-mgmt-core>=1.3.2",
],
- python_requires=">=3.7"
+ python_requires=">=3.8",
)