diff --git a/sdk/datamigration/azure-mgmt-datamigration/README.md b/sdk/datamigration/azure-mgmt-datamigration/README.md index 7966b4ac6f02..6d2c9cb64429 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/README.md +++ b/sdk/datamigration/azure-mgmt-datamigration/README.md @@ -1,28 +1,61 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Data Migration Client Library. -This package has been tested with Python 3.7+. +This package has been tested with Python 3.8+. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). ## _Disclaimer_ _Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For more information and questions, please refer to https://github.com/Azure/azure-sdk-for-python/issues/20691_ -# Usage +## Getting started +### Prerequisites -To learn how to use this package, see the [quickstart guide](https://aka.ms/azsdk/python/mgmt) - -For docs and references, see [Python SDK References](https://docs.microsoft.com/python/api/overview/azure/) -Code samples for this package can be found at [Data Migration](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com. -Additional code samples for different Azure services are available at [Samples Repo](https://aka.ms/azsdk/python/mgmt/samples) +- Python 3.8+ is required to use this package. +- [Azure subscription](https://azure.microsoft.com/free/) +### Install the package -# Provide Feedback +```bash +pip install azure-mgmt-datamigration +pip install azure-identity +``` + +### Authentication + +By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configure of following environment variables. + +- `AZURE_CLIENT_ID` for Azure client ID. +- `AZURE_TENANT_ID` for Azure tenant ID. +- `AZURE_CLIENT_SECRET` for Azure client secret. 
+ +In addition, Azure subscription ID can be configured via environment variable `AZURE_SUBSCRIPTION_ID`. + +With above configuration, client can be authenticated by following code: + +```python +from azure.identity import DefaultAzureCredential +from azure.mgmt.datamigration import DataMigrationManagementClient +import os + +sub_id = os.getenv("AZURE_SUBSCRIPTION_ID") +client = DataMigrationManagementClient(credential=DefaultAzureCredential(), subscription_id=sub_id) +``` + +## Examples + +Code samples for this package can be found at: +- [Search Data Migration](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com +- [Azure Python Mgmt SDK Samples Repo](https://aka.ms/azsdk/python/mgmt/samples) + + +## Troubleshooting + +## Next steps + +## Provide Feedback If you encounter any bugs or have suggestions, please file an issue in the [Issues](https://github.com/Azure/azure-sdk-for-python/issues) section of the project. 
- - -![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-datamigration%2FREADME.png) diff --git a/sdk/datamigration/azure-mgmt-datamigration/_meta.json b/sdk/datamigration/azure-mgmt-datamigration/_meta.json index 26eab4282e26..78c2c3272661 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/_meta.json +++ b/sdk/datamigration/azure-mgmt-datamigration/_meta.json @@ -1,11 +1,11 @@ { - "commit": "3ce1e043e2d0e57016437a3870f40e33da8a6397", + "commit": "2df8525ab545127262d700400c7db781ba50eb17", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest": "3.9.2", + "autorest": "3.10.2", "use": [ - "@autorest/python@6.2.7", - "@autorest/modelerfour@4.24.3" + "@autorest/python@6.19.0", + "@autorest/modelerfour@4.27.0" ], - "autorest_command": "autorest specification/datamigration/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.2.7 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False", + "autorest_command": "autorest specification/datamigration/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.19.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", "readme": "specification/datamigration/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_configuration.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_configuration.py index 904245d9627a..fcfd6d2a66c9 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_configuration.py +++ 
b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_configuration.py @@ -6,26 +6,19 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, TYPE_CHECKING -from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy from ._version import VERSION -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports - if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential -class DataMigrationManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes +class DataMigrationManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for DataMigrationManagementClient. Note that all parameters used to create this instance are saved as instance @@ -35,14 +28,13 @@ class DataMigrationManagementClientConfiguration(Configuration): # pylint: disa :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: Subscription ID that identifies an Azure subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2022-03-30-preview". Note that overriding + :keyword api_version: Api Version. Default value is "2023-07-15-preview". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: - super(DataMigrationManagementClientConfiguration, self).__init__(**kwargs) - api_version: Literal["2022-03-30-preview"] = kwargs.pop("api_version", "2022-03-30-preview") + api_version: str = kwargs.pop("api_version", "2023-07-15-preview") if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -54,6 +46,7 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-datamigration/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: @@ -62,9 +55,9 @@ def _configure(self, **kwargs: Any) -> None: self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: self.authentication_policy = ARMChallengeAuthenticationPolicy( diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_data_migration_management_client.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_data_migration_management_client.py index 
76c834fd9a92..8eab0f240f37 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_data_migration_management_client.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_data_migration_management_client.py @@ -8,18 +8,24 @@ from copy import deepcopy from typing import Any, TYPE_CHECKING +from typing_extensions import Self +from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient +from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy from . import models as _models from ._configuration import DataMigrationManagementClientConfiguration from ._serialization import Deserializer, Serializer from .operations import ( + DatabaseMigrationsMongoToCosmosDbRUMongoOperations, + DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations, DatabaseMigrationsSqlDbOperations, DatabaseMigrationsSqlMiOperations, DatabaseMigrationsSqlVmOperations, FilesOperations, + MigrationServicesOperations, Operations, ProjectsOperations, ResourceSkusOperations, @@ -38,6 +44,14 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """Data Migration Client. 
+ :ivar database_migrations_mongo_to_cosmos_db_ru_mongo: + DatabaseMigrationsMongoToCosmosDbRUMongoOperations operations + :vartype database_migrations_mongo_to_cosmos_db_ru_mongo: + azure.mgmt.datamigration.operations.DatabaseMigrationsMongoToCosmosDbRUMongoOperations + :ivar database_migrations_mongo_to_cosmos_dbv_core_mongo: + DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations operations + :vartype database_migrations_mongo_to_cosmos_dbv_core_mongo: + azure.mgmt.datamigration.operations.DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations :ivar database_migrations_sql_db: DatabaseMigrationsSqlDbOperations operations :vartype database_migrations_sql_db: azure.mgmt.datamigration.operations.DatabaseMigrationsSqlDbOperations @@ -49,6 +63,8 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-versi azure.mgmt.datamigration.operations.DatabaseMigrationsSqlVmOperations :ivar operations: Operations operations :vartype operations: azure.mgmt.datamigration.operations.Operations + :ivar migration_services: MigrationServicesOperations operations + :vartype migration_services: azure.mgmt.datamigration.operations.MigrationServicesOperations :ivar sql_migration_services: SqlMigrationServicesOperations operations :vartype sql_migration_services: azure.mgmt.datamigration.operations.SqlMigrationServicesOperations @@ -72,7 +88,7 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-versi :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2022-03-30-preview". Note that overriding + :keyword api_version: Api Version. Default value is "2023-07-15-preview". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no @@ -89,12 +105,36 @@ def __init__( self._config = DataMigrationManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + ARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, policies=_policies, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False + self.database_migrations_mongo_to_cosmos_db_ru_mongo = DatabaseMigrationsMongoToCosmosDbRUMongoOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.database_migrations_mongo_to_cosmos_dbv_core_mongo = DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.database_migrations_sql_db = DatabaseMigrationsSqlDbOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -105,6 +145,9 @@ def __init__( self._client, self._config, self._serialize, self._deserialize ) self.operations = Operations(self._client, self._config, 
self._serialize, self._deserialize) + self.migration_services = MigrationServicesOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.sql_migration_services = SqlMigrationServicesOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -116,7 +159,7 @@ def __init__( self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.files = FilesOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: + def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -136,14 +179,14 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, **kwargs) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore def close(self) -> None: self._client.close() - def __enter__(self) -> "DataMigrationManagementClient": + def __enter__(self) -> Self: self._client.__enter__() return self - def __exit__(self, *exc_details) -> None: + def __exit__(self, *exc_details: Any) -> None: self._client.__exit__(*exc_details) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_serialization.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_serialization.py index 2c170e28dbca..8139854b97bb 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_serialization.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_serialization.py @@ -38,7 +38,22 @@ import re import sys import codecs -from typing import Optional, Union, AnyStr, IO, Mapping +from typing import ( + 
Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + TypeVar, + MutableMapping, + Type, + List, + Mapping, +) try: from urllib import quote # type: ignore @@ -48,12 +63,14 @@ import isodate # type: ignore -from typing import Dict, Any, cast - -from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") +ModelType = TypeVar("ModelType", bound="Model") +JSON = MutableMapping[str, Any] + class RawDeserializer: @@ -107,7 +124,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: pass return ET.fromstring(data_as_str) # nosec - except ET.ParseError: + except ET.ParseError as err: # It might be because the server has an issue, and returned JSON with # content-type XML.... # So let's try a JSON load, and if it's still broken @@ -126,7 +143,9 @@ def _json_attemp(data): # The function hack is because Py2.7 messes up with exception # context otherwise. 
_LOGGER.critical("Wasn't XML not JSON, failing") - raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod @@ -153,13 +172,6 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], return None -try: - basestring # type: ignore - unicode_str = unicode # type: ignore -except NameError: - basestring = str - unicode_str = str - _LOGGER = logging.getLogger(__name__) try: @@ -277,8 +289,8 @@ class Model(object): _attribute_map: Dict[str, Dict[str, Any]] = {} _validation: Dict[str, Dict[str, Any]] = {} - def __init__(self, **kwargs): - self.additional_properties = {} + def __init__(self, **kwargs: Any) -> None: + self.additional_properties: Optional[Dict[str, Any]] = {} for k in kwargs: if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) @@ -287,25 +299,25 @@ def __init__(self, **kwargs): else: setattr(self, k, kwargs[k]) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: """Compare objects by comparing all attributes.""" if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False - def __ne__(self, other): + def __ne__(self, other: Any) -> bool: """Compare objects by comparing all attributes.""" return not self.__eq__(other) - def __str__(self): + def __str__(self) -> str: return str(self.__dict__) @classmethod - def enable_additional_properties_sending(cls): + def enable_additional_properties_sending(cls) -> None: cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} @classmethod - def is_xml_model(cls): + def is_xml_model(cls) -> bool: try: cls._xml_map # type: ignore except AttributeError: @@ -322,8 +334,8 @@ def _create_xml_node(cls): return 
_create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) - def serialize(self, keep_readonly=False, **kwargs): - """Return the JSON that would be sent to azure from this model. + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. @@ -334,10 +346,15 @@ def serialize(self, keep_readonly=False, **kwargs): :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) + return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore - def as_dict(self, keep_readonly=True, key_transformer=attribute_transformer, **kwargs): - """Return a dict that can be JSONify using json.dump. + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. Advanced usage might optionally use a callback as parameter: @@ -368,7 +385,7 @@ def my_key_transformer(key, attr_desc, value): :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) + return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore @classmethod def _infer_class_models(cls): @@ -384,7 +401,7 @@ def _infer_class_models(cls): return client_models @classmethod - def deserialize(cls, data, content_type=None): + def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. 
@@ -393,10 +410,15 @@ def deserialize(cls, data, content_type=None): :raises: DeserializationError if something went wrong """ deserializer = Deserializer(cls._infer_class_models()) - return deserializer(cls.__name__, data, content_type=content_type) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod - def from_dict(cls, data, key_extractors=None, content_type=None): + def from_dict( + cls: Type[ModelType], + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> ModelType: """Parse a dict using given key extractor return a model. By default consider key @@ -409,8 +431,8 @@ def from_dict(cls, data, key_extractors=None, content_type=None): :raises: DeserializationError if something went wrong """ deserializer = Deserializer(cls._infer_class_models()) - deserializer.key_extractors = ( - [ + deserializer.key_extractors = ( # type: ignore + [ # type: ignore attribute_key_case_insensitive_extractor, rest_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, @@ -418,7 +440,7 @@ def from_dict(cls, data, key_extractors=None, content_type=None): if key_extractors is None else key_extractors ) - return deserializer(cls.__name__, data, content_type=content_type) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def _flatten_subtype(cls, key, objects): @@ -518,7 +540,7 @@ class Serializer(object): "multiple": lambda x, y: x % y != 0, } - def __init__(self, classes=None): + def __init__(self, classes: Optional[Mapping[str, type]] = None): self.serialize_type = { "iso-8601": Serializer.serialize_iso, "rfc-1123": Serializer.serialize_rfc, @@ -534,7 +556,7 @@ def __init__(self, classes=None): "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies = dict(classes) if classes else {} + self.dependencies: Dict[str, type] = dict(classes) if classes else {} self.key_transformer 
= full_restapi_key_transformer self.client_side_validation = True @@ -602,7 +624,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if xml_desc.get("attr", False): if xml_ns: ET.register_namespace(xml_prefix, xml_ns) - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) serialized.set(xml_name, new_attr) # type: ignore continue if xml_desc.get("text", False): @@ -622,12 +644,11 @@ def _serialize(self, target_obj, data_type=None, **kwargs): else: # That's a basic type # Integrate namespace if necessary local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) - local_node.text = unicode_str(new_attr) + local_node.text = str(new_attr) serialized.append(local_node) # type: ignore else: # JSON for k in reversed(keys): # type: ignore - unflattened = {k: new_attr} - new_attr = unflattened + new_attr = {k: new_attr} _new_attr = new_attr _serialized = serialized @@ -636,12 +657,13 @@ def _serialize(self, target_obj, data_type=None, **kwargs): _serialized.update(_new_attr) # type: ignore _new_attr = _new_attr[k] # type: ignore _serialized = _serialized[k] - except ValueError: - continue + except ValueError as err: + if isinstance(err, SerializationError): + raise except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) - raise_with_traceback(SerializationError, msg, err) + raise SerializationError(msg) from err else: return serialized @@ -656,8 +678,8 @@ def body(self, data, data_type, **kwargs): """ # Just in case this is a dict - internal_data_type = data_type.strip("[]{}") - internal_data_type = self.dependencies.get(internal_data_type, None) + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) try: is_xml_model_serialization = kwargs["is_xml"] except KeyError: @@ -683,7 +705,7 @@ def body(self, data, data_type, **kwargs): ] data = 
deserializer._deserialize(data_type, data) except DeserializationError as err: - raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + raise SerializationError("Unable to build a model: " + str(err)) from err return self._serialize(data, data_type, **kwargs) @@ -703,6 +725,7 @@ def url(self, name, data, data_type, **kwargs): if kwargs.get("skip_quote") is True: output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") except SerializationError: @@ -715,7 +738,9 @@ def query(self, name, data, data_type, **kwargs): :param data: The data to be serialized. :param str data_type: The type to be serialized from. - :rtype: str + :keyword bool skip_quote: Whether to skip quote the serialized result. + Defaults to False. + :rtype: str, list :raises: TypeError if serialization fails. :raises: ValueError if data is None """ @@ -723,10 +748,8 @@ def query(self, name, data, data_type, **kwargs): # Treat the list aside, since we don't want to encode the div separator if data_type.startswith("["): internal_data_type = data_type[1:-1] - data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] - if not kwargs.get("skip_quote", False): - data = [quote(str(d), safe="") for d in data] - return str(self.serialize_iter(data, internal_data_type, **kwargs)) + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) # Not a list, regular serialization output = self.serialize_data(data, data_type, **kwargs) @@ -777,6 +800,8 @@ def serialize_data(self, data, data_type, **kwargs): raise ValueError("No value for given attribute") try: + if data is CoreNull: + return None if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) @@ -795,7 +820,7 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as 
err: msg = "Unable to serialize value: {!r} as type: {!r}." - raise_with_traceback(SerializationError, msg.format(data, data_type), err) + raise SerializationError(msg.format(data, data_type)) from err else: return self._serialize(data, **kwargs) @@ -863,6 +888,8 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. + :keyword bool do_quote: Whether to quote the serialized result of each iterable element. + Defaults to False. :rtype: list, str """ if isinstance(data, str): @@ -875,9 +902,14 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): for d in data: try: serialized.append(self.serialize_data(d, iter_type, **kwargs)) - except ValueError: + except ValueError as err: + if isinstance(err, SerializationError): + raise serialized.append(None) + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + if div: serialized = ["" if s is None else str(s) for s in serialized] serialized = div.join(serialized) @@ -922,7 +954,9 @@ def serialize_dict(self, attr, dict_type, **kwargs): for key, value in attr.items(): try: serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) - except ValueError: + except ValueError as err: + if isinstance(err, SerializationError): + raise serialized[self.serialize_unicode(key)] = None if "xml" in serialization_ctxt: @@ -955,7 +989,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) if obj_type is _long_type: return self.serialize_long(attr) - if obj_type is unicode_str: + if obj_type is str: return self.serialize_unicode(attr) if obj_type is datetime.datetime: return self.serialize_iso(attr) @@ -1132,10 +1166,10 @@ def serialize_iso(attr, **kwargs): return date + microseconds + "Z" except (ValueError, 
OverflowError) as err: msg = "Unable to serialize datetime object." - raise_with_traceback(SerializationError, msg, err) + raise SerializationError(msg) from err except AttributeError as err: msg = "ISO-8601 object must be valid Datetime object." - raise_with_traceback(TypeError, msg, err) + raise TypeError(msg) from err @staticmethod def serialize_unix(attr, **kwargs): @@ -1161,7 +1195,8 @@ def rest_key_extractor(attr, attr_desc, data): working_data = data while "." in key: - dict_keys = _FLATTEN.split(key) + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1170,7 +1205,6 @@ def rest_key_extractor(attr, attr_desc, data): if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well - # https://github.com/Azure/msrest-for-python/issues/197 return None key = ".".join(dict_keys[1:]) @@ -1191,7 +1225,6 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well - # https://github.com/Azure/msrest-for-python/issues/197 return None key = ".".join(dict_keys[1:]) @@ -1242,7 +1275,7 @@ def _extract_name_from_internal_type(internal_type): xml_name = internal_type_xml_map.get("name", internal_type.__name__) xml_ns = internal_type_xml_map.get("ns", None) if xml_ns: - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) return xml_name @@ -1266,7 +1299,7 @@ def xml_key_extractor(attr, attr_desc, data): # Integrate namespace if necessary xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) if xml_ns: - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) # If it's an attribute, that's simple if 
xml_desc.get("attr", False): @@ -1332,7 +1365,7 @@ class Deserializer(object): valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - def __init__(self, classes=None): + def __init__(self, classes: Optional[Mapping[str, type]] = None): self.deserialize_type = { "iso-8601": Deserializer.deserialize_iso, "rfc-1123": Deserializer.deserialize_rfc, @@ -1352,7 +1385,7 @@ def __init__(self, classes=None): "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies = dict(classes) if classes else {} + self.dependencies: Dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much @@ -1405,12 +1438,12 @@ def _deserialize(self, target_obj, data): response, class_name = self._classify_target(target_obj, data) - if isinstance(response, basestring): + if isinstance(response, str): return self.deserialize_data(data, response) elif isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) - if data is None: + if data is None or data is CoreNull: return data try: attributes = response._attribute_map # type: ignore @@ -1442,7 +1475,7 @@ def _deserialize(self, target_obj, data): d_attrs[attr] = value except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: additional_properties = self._build_additional_properties(attributes, data) return self._instantiate_model(response, d_attrs, additional_properties) @@ -1471,22 +1504,22 @@ def _classify_target(self, target, data): Once classification has been determined, initialize object. 
:param str target: The target object type to deserialize to. - :param str/dict data: The response data to deseralize. + :param str/dict data: The response data to deserialize. """ if target is None: return None, None - if isinstance(target, basestring): + if isinstance(target, str): try: target = self.dependencies[target] except KeyError: return target, target try: - target = target._classify(data, self.dependencies) + target = target._classify(data, self.dependencies) # type: ignore except AttributeError: pass # Target is not a Model, no classify - return target, target.__class__.__name__ + return target, target.__class__.__name__ # type: ignore def failsafe_deserialize(self, target_obj, data, content_type=None): """Ignores any errors encountered in deserialization, @@ -1496,7 +1529,7 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): a deserialization error. :param str target_obj: The target object type to deserialize to. - :param str/dict data: The response data to deseralize. + :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. """ try: @@ -1539,7 +1572,7 @@ def _unpack_content(raw_data, content_type=None): if hasattr(raw_data, "_content_consumed"): return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) - if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore return raw_data @@ -1613,7 +1646,7 @@ def deserialize_data(self, data, data_type): except (ValueError, TypeError, AttributeError) as err: msg = "Unable to deserialize response data." 
msg += " Data: {}, {}".format(data, data_type) - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return self._deserialize(obj_type, data) @@ -1661,7 +1694,7 @@ def deserialize_object(self, attr, **kwargs): if isinstance(attr, ET.Element): # Do no recurse on XML, just return the tree as-is return attr - if isinstance(attr, basestring): + if isinstance(attr, str): return self.deserialize_basic(attr, "str") obj_type = type(attr) if obj_type in self.basic_types: @@ -1718,7 +1751,7 @@ def deserialize_basic(self, attr, data_type): if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, basestring): + elif isinstance(attr, str): if attr.lower() in ["true", "1"]: return True elif attr.lower() in ["false", "0"]: @@ -1769,7 +1802,6 @@ def deserialize_enum(data, enum_obj): data = data.value if isinstance(data, int): # Workaround. We might consider remove it in the future. - # https://github.com/Azure/azure-rest-api-specs/issues/141 try: return list(enum_obj.__members__.values())[data] except IndexError: @@ -1823,10 +1855,10 @@ def deserialize_decimal(attr): if isinstance(attr, ET.Element): attr = attr.text try: - return decimal.Decimal(attr) # type: ignore + return decimal.Decimal(str(attr)) # type: ignore except decimal.DecimalException as err: msg = "Invalid decimal {}".format(attr) - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err @staticmethod def deserialize_long(attr): @@ -1854,7 +1886,7 @@ def deserialize_duration(attr): duration = isodate.parse_duration(attr) except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." 
- raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return duration @@ -1871,7 +1903,7 @@ def deserialize_date(attr): if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore raise DeserializationError("Date must have only digits and -. Received: %s" % attr) # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. - return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) @staticmethod def deserialize_time(attr): @@ -1906,7 +1938,7 @@ def deserialize_rfc(attr): date_obj = date_obj.astimezone(tz=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj @@ -1943,7 +1975,7 @@ def deserialize_iso(attr): raise OverflowError("Hit max or min date") except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj @@ -1959,9 +1991,10 @@ def deserialize_unix(attr): if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore try: + attr = int(attr) date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) except ValueError as err: msg = "Cannot deserialize to unix datetime object." 
- raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_vendor.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_vendor.py deleted file mode 100644 index 9aad73fc743e..000000000000 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_vendor.py +++ /dev/null @@ -1,27 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request - - -def _format_url_section(template, **kwargs): - components = template.split("/") - while components: - try: - return template.format(**kwargs) - except KeyError as key: - formatted_components = template.split("/") - components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] - template = "/".join(components) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_version.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_version.py index b5e2ac841400..e5754a47ce68 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_version.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/_version.py @@ -6,4 +6,4 @@ # Changes may 
cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "10.1.0b1" +VERSION = "1.0.0b1" diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_configuration.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_configuration.py index 4e5ee073f2d1..f709dea3c5b3 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_configuration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_configuration.py @@ -6,26 +6,19 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, TYPE_CHECKING -from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy from .._version import VERSION -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports - if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -class DataMigrationManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes +class DataMigrationManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for DataMigrationManagementClient. 
Note that all parameters used to create this instance are saved as instance @@ -35,14 +28,13 @@ class DataMigrationManagementClientConfiguration(Configuration): # pylint: disa :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: Subscription ID that identifies an Azure subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2022-03-30-preview". Note that overriding + :keyword api_version: Api Version. Default value is "2023-07-15-preview". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: - super(DataMigrationManagementClientConfiguration, self).__init__(**kwargs) - api_version: Literal["2022-03-30-preview"] = kwargs.pop("api_version", "2022-03-30-preview") + api_version: str = kwargs.pop("api_version", "2023-07-15-preview") if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -54,6 +46,7 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-datamigration/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: @@ -62,9 +55,9 @@ def _configure(self, **kwargs: Any) -> None: self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or 
policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_data_migration_management_client.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_data_migration_management_client.py index 786aa9042b06..a39aebdaaf06 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_data_migration_management_client.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/_data_migration_management_client.py @@ -8,18 +8,24 @@ from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self +from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient +from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy from .. import models as _models from .._serialization import Deserializer, Serializer from ._configuration import DataMigrationManagementClientConfiguration from .operations import ( + DatabaseMigrationsMongoToCosmosDbRUMongoOperations, + DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations, DatabaseMigrationsSqlDbOperations, DatabaseMigrationsSqlMiOperations, DatabaseMigrationsSqlVmOperations, FilesOperations, + MigrationServicesOperations, Operations, ProjectsOperations, ResourceSkusOperations, @@ -38,6 +44,14 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """Data Migration Client. 
+ :ivar database_migrations_mongo_to_cosmos_db_ru_mongo: + DatabaseMigrationsMongoToCosmosDbRUMongoOperations operations + :vartype database_migrations_mongo_to_cosmos_db_ru_mongo: + azure.mgmt.datamigration.aio.operations.DatabaseMigrationsMongoToCosmosDbRUMongoOperations + :ivar database_migrations_mongo_to_cosmos_dbv_core_mongo: + DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations operations + :vartype database_migrations_mongo_to_cosmos_dbv_core_mongo: + azure.mgmt.datamigration.aio.operations.DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations :ivar database_migrations_sql_db: DatabaseMigrationsSqlDbOperations operations :vartype database_migrations_sql_db: azure.mgmt.datamigration.aio.operations.DatabaseMigrationsSqlDbOperations @@ -49,6 +63,9 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-versi azure.mgmt.datamigration.aio.operations.DatabaseMigrationsSqlVmOperations :ivar operations: Operations operations :vartype operations: azure.mgmt.datamigration.aio.operations.Operations + :ivar migration_services: MigrationServicesOperations operations + :vartype migration_services: + azure.mgmt.datamigration.aio.operations.MigrationServicesOperations :ivar sql_migration_services: SqlMigrationServicesOperations operations :vartype sql_migration_services: azure.mgmt.datamigration.aio.operations.SqlMigrationServicesOperations @@ -72,7 +89,7 @@ class DataMigrationManagementClient: # pylint: disable=client-accepts-api-versi :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2022-03-30-preview". Note that overriding + :keyword api_version: Api Version. Default value is "2023-07-15-preview". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no @@ -89,12 +106,36 @@ def __init__( self._config = DataMigrationManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + AsyncARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, policies=_policies, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False + self.database_migrations_mongo_to_cosmos_db_ru_mongo = DatabaseMigrationsMongoToCosmosDbRUMongoOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.database_migrations_mongo_to_cosmos_dbv_core_mongo = DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.database_migrations_sql_db = DatabaseMigrationsSqlDbOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -105,6 +146,9 @@ def __init__( self._client, self._config, self._serialize, self._deserialize ) self.operations = 
Operations(self._client, self._config, self._serialize, self._deserialize) + self.migration_services = MigrationServicesOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.sql_migration_services = SqlMigrationServicesOperations( self._client, self._config, self._serialize, self._deserialize ) @@ -116,7 +160,9 @@ def __init__( self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) self.files = FilesOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: + def _send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -136,14 +182,14 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncH request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, **kwargs) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "DataMigrationManagementClient": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self - async def __aexit__(self, *exc_details) -> None: + async def __aexit__(self, *exc_details: Any) -> None: await self._client.__aexit__(*exc_details) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/__init__.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/__init__.py index 4825871afb87..c6181dc277d6 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/__init__.py +++ 
b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/__init__.py @@ -6,10 +6,17 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from ._database_migrations_mongo_to_cosmos_db_ru_mongo_operations import ( + DatabaseMigrationsMongoToCosmosDbRUMongoOperations, +) +from ._database_migrations_mongo_to_cosmos_dbv_core_mongo_operations import ( + DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations, +) from ._database_migrations_sql_db_operations import DatabaseMigrationsSqlDbOperations from ._database_migrations_sql_mi_operations import DatabaseMigrationsSqlMiOperations from ._database_migrations_sql_vm_operations import DatabaseMigrationsSqlVmOperations from ._operations import Operations +from ._migration_services_operations import MigrationServicesOperations from ._sql_migration_services_operations import SqlMigrationServicesOperations from ._resource_skus_operations import ResourceSkusOperations from ._services_operations import ServicesOperations @@ -24,10 +31,13 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ + "DatabaseMigrationsMongoToCosmosDbRUMongoOperations", + "DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations", "DatabaseMigrationsSqlDbOperations", "DatabaseMigrationsSqlMiOperations", "DatabaseMigrationsSqlVmOperations", "Operations", + "MigrationServicesOperations", "SqlMigrationServicesOperations", "ResourceSkusOperations", "ServicesOperations", diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py new file mode 100644 index 000000000000..cdce9bd7649a --- /dev/null +++ 
b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py @@ -0,0 +1,551 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... 
import models as _models +from ...operations._database_migrations_mongo_to_cosmos_db_ru_mongo_operations import ( + build_create_request, + build_delete_request, + build_get_for_scope_request, + build_get_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class DatabaseMigrationsMongoToCosmosDbRUMongoOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.datamigration.aio.DataMigrationManagementClient`'s + :attr:`database_migrations_mongo_to_cosmos_db_ru_mongo` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, target_resource_name: str, migration_name: str, **kwargs: Any + ) -> _models.DatabaseMigrationCosmosDbMongo: + """Get Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. 
+ :type migration_name: str + :return: DatabaseMigrationCosmosDbMongo or the result of cls(response) + :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None) + + _request = build_get_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_initial( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: 
Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "DatabaseMigrationCosmosDbMongo") + + _request = build_create_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: _models.DatabaseMigrationCosmosDbMongo, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. 
+ + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Is either a + DatabaseMigrationCosmosDbMongo type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo or IO[bytes] + :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + 
kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + force: Optional[bool] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_delete_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + force=force, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + 
+ _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + force: Optional[bool] = None, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param force: Optional force delete boolean. If this is provided as true, migration will be + deleted even if active. Default value is None. 
+ :type force: bool + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + force=force, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def get_for_scope( + self, resource_group_name: str, target_resource_name: str, **kwargs: Any + ) -> 
AsyncIterable["_models.DatabaseMigrationCosmosDbMongo"]: + """Get Database Migration resources for the scope. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :return: An iterator like instance of either DatabaseMigrationCosmosDbMongo or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongoListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_get_for_scope_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = 
self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongoListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py new file mode 100644 index 000000000000..513a9758c614 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py @@ -0,0 +1,551 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ...operations._database_migrations_mongo_to_cosmos_dbv_core_mongo_operations import ( + build_create_request, + build_delete_request, + build_get_for_scope_request, + build_get_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.datamigration.aio.DataMigrationManagementClient`'s + :attr:`database_migrations_mongo_to_cosmos_dbv_core_mongo` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, target_resource_name: str, migration_name: str, **kwargs: Any + ) -> _models.DatabaseMigrationCosmosDbMongo: + """Get Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. 
+ :type migration_name: str + :return: DatabaseMigrationCosmosDbMongo or the result of cls(response) + :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None) + + _request = build_get_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_initial( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: 
Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "DatabaseMigrationCosmosDbMongo") + + _request = build_create_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: _models.DatabaseMigrationCosmosDbMongo, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. 
+ + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Is either a + DatabaseMigrationCosmosDbMongo type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo or IO[bytes] + :return: An instance of AsyncLROPoller that returns either DatabaseMigrationCosmosDbMongo or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + 
kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.DatabaseMigrationCosmosDbMongo]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + force: Optional[bool] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_delete_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + force=force, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + 
+ _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + force: Optional[bool] = None, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param force: Optional force delete boolean. If this is provided as true, migration will be + deleted even if active. Default value is None. 
+ :type force: bool + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + force=force, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def get_for_scope( + self, resource_group_name: str, target_resource_name: str, **kwargs: Any + ) -> 
AsyncIterable["_models.DatabaseMigrationCosmosDbMongo"]: + """Get Database Migration resources for the scope. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :return: An iterator like instance of either DatabaseMigrationCosmosDbMongo or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongoListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_get_for_scope_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = 
self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongoListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_db_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_db_operations.py index ebcfe8bdad64..bc8885b8fa65 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_db_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_db_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # 
Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,19 +16,19 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._database_migrations_sql_db_operations import ( build_cancel_request, build_create_or_update_request, @@ -35,10 +36,10 @@ build_get_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -87,12 +88,11 @@ async def get( :type migration_operation_id: str :param expand: Complete migration details be included in the response. Default value is None. :type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DatabaseMigrationSqlDb or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -103,12 +103,10 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -116,15 +114,14 @@ async 
def get( migration_operation_id=migration_operation_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -133,26 +130,22 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return deserialized # type: ignore async def _create_or_update_initial( self, resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlDb, IO], + parameters: Union[_models.DatabaseMigrationSqlDb, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseMigrationSqlDb: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -163,21 +156,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) 
or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DatabaseMigrationSqlDb") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -186,38 +177,34 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = 
self._deserialize("DatabaseMigrationSqlDb", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } - @overload async def begin_create_or_update( self, @@ -243,14 +230,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlDb or the result of cls(response) :rtype: @@ -264,7 +243,7 @@ async def begin_create_or_update( resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -279,18 +258,10 @@ async def begin_create_or_update( :param target_db_name: The name of the target database. Required. 
:type target_db_name: str :param parameters: Details of Sql Db migration resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlDb or the result of cls(response) :rtype: @@ -304,7 +275,7 @@ async def begin_create_or_update( resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlDb, IO], + parameters: Union[_models.DatabaseMigrationSqlDb, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.DatabaseMigrationSqlDb]: """Create or Update Database Migration resource. @@ -316,20 +287,9 @@ async def begin_create_or_update( :type sql_db_instance_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Details of Sql Db migration resource. Is either a model type or a IO type. - Required. - :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of Sql Db migration resource. Is either a DatabaseMigrationSqlDb + type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb or IO[bytes] :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlDb or the result of cls(response) :rtype: @@ -339,9 +299,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -360,12 +318,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlDb", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -375,27 +334,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.DatabaseMigrationSqlDb].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return AsyncLROPoller[_models.DatabaseMigrationSqlDb]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, sql_db_instance_name: str, target_db_name: str, force: Optional[bool] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -406,41 +363,43 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: 
ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, subscription_id=self._config.subscription_id, force=force, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -463,14 +422,6 @@ async def begin_delete( :param force: Optional force delete boolean. If this is provided as true, migration will be deleted even if active. Default value is None. 
:type force: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -478,15 +429,13 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -497,11 +446,12 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + 
return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -510,27 +460,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _cancel_initial( # pylint: disable=inconsistent-return-statements + async def _cancel_initial( self, resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -541,21 +487,19 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - request = build_cancel_request( + _request = build_cancel_request( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -564,29 +508,33 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cancel_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cancel_initial.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return deserialized # type: ignore @overload async def begin_cancel( @@ -614,14 +562,6 @@ async def begin_cancel( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -633,7 +573,7 @@ async def begin_cancel( resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -649,18 +589,10 @@ async def begin_cancel( :type target_db_name: str :param parameters: Required migration operation ID for which cancel will be initiated. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -672,7 +604,7 @@ async def begin_cancel( resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[None]: """Stop on going migration for the database. @@ -685,19 +617,8 @@ async def begin_cancel( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required migration operation ID for which cancel will be initiated. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + either a MigrationOperationInput type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -705,16 +626,14 @@ async def begin_cancel( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._cancel_initial( # type: ignore + raw_result = await self._cancel_initial( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -726,11 +645,12 @@ async def begin_cancel( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return 
cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -739,14 +659,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_mi_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_mi_operations.py index 292a56ebb61f..afe6dfc4dc9d 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_mi_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_mi_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,19 +16,19 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._database_migrations_sql_mi_operations import ( build_cancel_request, build_create_or_update_request, @@ -35,10 +36,10 @@ build_get_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -87,12 +88,11 @@ async def get( :type migration_operation_id: str :param expand: Complete migration details be included in the response. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DatabaseMigrationSqlMi or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -103,12 +103,10 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -116,15 +114,14 @@ async def get( migration_operation_id=migration_operation_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -133,26 +130,22 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response) + deserialized = 
self._deserialize("DatabaseMigrationSqlMi", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return deserialized # type: ignore async def _create_or_update_initial( self, resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlMi, IO], + parameters: Union[_models.DatabaseMigrationSqlMi, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseMigrationSqlMi: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -163,21 +156,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DatabaseMigrationSqlMi") - request = build_create_or_update_request( + _request = build_create_or_update_request( 
resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -186,38 +177,34 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } - @overload async def begin_create_or_update( self, @@ -243,14 +230,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlMi or the result of cls(response) :rtype: @@ -264,7 +243,7 @@ async def begin_create_or_update( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -279,18 +258,10 @@ async def begin_create_or_update( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlMi or the result of cls(response) :rtype: @@ -304,7 +275,7 @@ async def begin_create_or_update( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlMi, IO], + parameters: Union[_models.DatabaseMigrationSqlMi, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.DatabaseMigrationSqlMi]: """Create a new database migration to a given SQL Managed Instance. @@ -316,20 +287,9 @@ async def begin_create_or_update( :type managed_instance_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of SqlMigrationService resource. Is either a DatabaseMigrationSqlMi + type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi or IO[bytes] :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlMi or the result of cls(response) :rtype: @@ -339,9 +299,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -360,12 +318,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -375,27 +334,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.DatabaseMigrationSqlMi].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return AsyncLROPoller[_models.DatabaseMigrationSqlMi]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - async def _cancel_initial( # pylint: disable=inconsistent-return-statements + async def _cancel_initial( self, resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -406,21 +363,19 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - request = build_cancel_request( + _request = build_cancel_request( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ 
-429,29 +384,33 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cancel_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cancel_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return deserialized # type: ignore @overload async def begin_cancel( @@ -479,14 +438,6 @@ async def begin_cancel( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -498,7 +449,7 @@ async def begin_cancel( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -514,18 +465,10 @@ async def begin_cancel( :type target_db_name: str :param parameters: Required migration operation ID for which cancel will be initiated. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -537,7 +480,7 @@ async def begin_cancel( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[None]: """Stop in-progress database migration to SQL Managed Instance. @@ -550,19 +493,8 @@ async def begin_cancel( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required migration operation ID for which cancel will be initiated. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -570,16 +502,14 @@ async def begin_cancel( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._cancel_initial( # type: ignore + raw_result = await self._cancel_initial( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -591,11 +521,12 @@ async def begin_cancel( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -604,27 +535,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return 
AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _cutover_initial( # pylint: disable=inconsistent-return-statements + async def _cutover_initial( self, resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -635,21 +562,19 @@ async def _cutover_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, 
(IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - request = build_cutover_request( + _request = build_cutover_request( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -658,29 +583,33 @@ async def _cutover_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cutover_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cutover_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover" - } + return deserialized # type: ignore @overload async def begin_cutover( @@ -708,14 +637,6 @@ async def begin_cutover( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -727,7 +648,7 @@ async def begin_cutover( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -743,18 +664,10 @@ async def begin_cutover( :type target_db_name: str :param parameters: Required migration operation ID for which cutover will be initiated. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -766,7 +679,7 @@ async def begin_cutover( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[None]: """Initiate cutover for in-progress online database migration to SQL Managed Instance. @@ -779,19 +692,8 @@ async def begin_cutover( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required migration operation ID for which cutover will be initiated. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -799,16 +701,14 @@ async def begin_cutover( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._cutover_initial( # type: ignore + raw_result = await self._cutover_initial( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -820,11 +720,12 @@ async def begin_cutover( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -833,14 +734,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return 
AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cutover.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_vm_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_vm_operations.py index 97c1b71108e0..ae1d37b93c44 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_vm_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_database_migrations_sql_vm_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,19 +16,19 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._database_migrations_sql_vm_operations import ( build_cancel_request, build_create_or_update_request, @@ -35,10 +36,10 @@ build_get_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -87,12 +88,11 @@ async def get( :type migration_operation_id: str :param expand: Complete migration details be included in the response. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DatabaseMigrationSqlVm or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -103,12 +103,10 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -116,15 +114,14 @@ async def get( migration_operation_id=migration_operation_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -133,26 +130,22 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response) + deserialized = 
self._deserialize("DatabaseMigrationSqlVm", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return deserialized # type: ignore async def _create_or_update_initial( self, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlVm, IO], + parameters: Union[_models.DatabaseMigrationSqlVm, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseMigrationSqlVm: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -163,21 +156,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DatabaseMigrationSqlVm") - request = build_create_or_update_request( + _request = 
build_create_or_update_request( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -186,38 +177,34 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } - @overload async def begin_create_or_update( self, @@ -243,14 +230,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlVm or the result of cls(response) :rtype: @@ -264,7 +243,7 @@ async def begin_create_or_update( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -279,18 +258,10 @@ async def begin_create_or_update( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlVm or the result of cls(response) :rtype: @@ -304,7 +275,7 @@ async def begin_create_or_update( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlVm, IO], + parameters: Union[_models.DatabaseMigrationSqlVm, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.DatabaseMigrationSqlVm]: """Create a new database migration to a given SQL VM. @@ -316,20 +287,9 @@ async def begin_create_or_update( :type sql_virtual_machine_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of SqlMigrationService resource. Is either a DatabaseMigrationSqlVm + type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm or IO[bytes] :return: An instance of AsyncLROPoller that returns either DatabaseMigrationSqlVm or the result of cls(response) :rtype: @@ -339,9 +299,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -360,12 +318,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -375,27 +334,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.DatabaseMigrationSqlVm].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return AsyncLROPoller[_models.DatabaseMigrationSqlVm]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - async def _cancel_initial( # pylint: disable=inconsistent-return-statements + async def _cancel_initial( self, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -406,21 +363,19 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - request = build_cancel_request( + _request = build_cancel_request( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, 
target_db_name=target_db_name, @@ -429,29 +384,33 @@ async def _cancel_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cancel_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cancel_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return deserialized # type: ignore @overload async def begin_cancel( @@ -478,14 +437,6 @@ async def begin_cancel( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -497,7 +448,7 @@ async def begin_cancel( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -512,18 +463,10 @@ async def begin_cancel( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -535,7 +478,7 @@ async def begin_cancel( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[None]: """Stop in-progress database migration to SQL VM. @@ -547,19 +490,8 @@ async def begin_cancel( :type sql_virtual_machine_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Is either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -567,16 +499,14 @@ async def begin_cancel( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._cancel_initial( # type: ignore + raw_result = await self._cancel_initial( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -588,11 +518,12 @@ async def begin_cancel( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -601,27 +532,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return 
AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _cutover_initial( # pylint: disable=inconsistent-return-statements + async def _cutover_initial( self, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -632,21 +559,19 @@ async def _cutover_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if 
isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - request = build_cutover_request( + _request = build_cutover_request( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -655,29 +580,33 @@ async def _cutover_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cutover_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cutover_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover" - } + return deserialized # type: ignore @overload async def begin_cutover( @@ -704,14 +633,6 @@ async def begin_cutover( :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -723,7 +644,7 @@ async def begin_cutover( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -738,18 +659,10 @@ async def begin_cutover( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -761,7 +674,7 @@ async def begin_cutover( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[None]: """Initiate cutover for in-progress online database migration to SQL VM. @@ -773,19 +686,8 @@ async def begin_cutover( :type sql_virtual_machine_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Is either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -793,16 +695,14 @@ async def begin_cutover( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._cutover_initial( # type: ignore + raw_result = await self._cutover_initial( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -814,11 +714,12 @@ async def begin_cutover( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -827,14 +728,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return 
AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cutover.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_files_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_files_operations.py index 028ca68bb599..757631111287 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_files_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_files_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -20,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._files_operations import ( build_create_or_update_request, build_delete_request, @@ -39,10 +38,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -81,7 +80,6 @@ def list( :type service_name: str :param project_name: Name of the project. Required. 
:type project_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ProjectFile or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ProjectFile] :raises ~azure.core.exceptions.HttpResponseError: @@ -89,12 +87,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FileList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -105,18 +101,16 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( group_name=group_name, service_name=service_name, project_name=project_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -128,13 +122,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + 
_request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("FileList", pipeline_response) @@ -144,10 +137,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -160,10 +154,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files" - } - @distributed_trace_async async def get( self, group_name: str, service_name: str, project_name: str, file_name: str, **kwargs: Any @@ -181,12 +171,11 @@ async def get( :type project_name: str :param file_name: Name of the File. Required. 
:type file_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -197,27 +186,24 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, service_name=service_name, project_name=project_name, file_name=file_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -227,16 +213,12 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectFile", pipeline_response) + deserialized = self._deserialize("ProjectFile", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return 
deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}" - } + return deserialized # type: ignore @overload async def create_or_update( @@ -267,7 +249,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: @@ -280,7 +261,7 @@ async def create_or_update( service_name: str, project_name: str, file_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -298,11 +279,10 @@ async def create_or_update( :param file_name: Name of the File. Required. :type file_name: str :param parameters: Information about the file. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: @@ -315,7 +295,7 @@ async def create_or_update( service_name: str, project_name: str, file_name: str, - parameters: Union[_models.ProjectFile, IO], + parameters: Union[_models.ProjectFile, IO[bytes]], **kwargs: Any ) -> _models.ProjectFile: """Create a file resource. 
@@ -330,17 +310,14 @@ async def create_or_update( :type project_name: str :param file_name: Name of the File. Required. :type file_name: str - :param parameters: Information about the file. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the file. Is either a ProjectFile type or a IO[bytes] + type. Required. + :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO[bytes] :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -351,21 +328,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectFile") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, 
service_name=service_name, project_name=project_name, @@ -375,15 +350,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -393,21 +367,13 @@ async def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ProjectFile", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("ProjectFile", pipeline_response) + deserialized = self._deserialize("ProjectFile", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}" - } - @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements self, group_name: str, service_name: str, project_name: str, file_name: str, **kwargs: Any @@ -424,12 +390,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type project_name: str :param file_name: Name of the File. Required. 
:type file_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -440,27 +405,24 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, project_name=project_name, file_name=file_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -471,11 +433,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}" - } + return 
cls(pipeline_response, None, {}) # type: ignore @overload async def update( @@ -506,7 +464,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: @@ -519,7 +476,7 @@ async def update( service_name: str, project_name: str, file_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -537,11 +494,10 @@ async def update( :param file_name: Name of the File. Required. :type file_name: str :param parameters: Information about the file. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: @@ -554,7 +510,7 @@ async def update( service_name: str, project_name: str, file_name: str, - parameters: Union[_models.ProjectFile, IO], + parameters: Union[_models.ProjectFile, IO[bytes]], **kwargs: Any ) -> _models.ProjectFile: """Update a file. @@ -569,17 +525,14 @@ async def update( :type project_name: str :param file_name: Name of the File. Required. :type file_name: str - :param parameters: Information about the file. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the file. Is either a ProjectFile type or a IO[bytes] + type. Required. + :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO[bytes] :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -590,21 +543,19 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectFile") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -614,15 +565,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -632,16 +582,12 @@ async def update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectFile", pipeline_response) + deserialized = self._deserialize("ProjectFile", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}" - } + return deserialized # type: ignore @distributed_trace_async async def read( @@ -660,12 +606,11 @@ async def read( :type project_name: str :param file_name: Name of the File. Required. 
:type file_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: FileStorageInfo or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.FileStorageInfo :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -676,27 +621,24 @@ async def read( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FileStorageInfo] = kwargs.pop("cls", None) - request = build_read_request( + _request = build_read_request( group_name=group_name, service_name=service_name, project_name=project_name, file_name=file_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.read.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -706,16 +648,12 @@ async def read( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("FileStorageInfo", pipeline_response) + deserialized = self._deserialize("FileStorageInfo", pipeline_response.http_response) if cls: - return cls(pipeline_response, 
deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - read.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}/read" - } + return deserialized # type: ignore @distributed_trace_async async def read_write( @@ -733,12 +671,11 @@ async def read_write( :type project_name: str :param file_name: Name of the File. Required. :type file_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: FileStorageInfo or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.FileStorageInfo :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -749,27 +686,24 @@ async def read_write( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FileStorageInfo] = kwargs.pop("cls", None) - request = build_read_write_request( + _request = build_read_write_request( group_name=group_name, service_name=service_name, project_name=project_name, file_name=file_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.read_write.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - 
request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -779,13 +713,9 @@ async def read_write( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("FileStorageInfo", pipeline_response) + deserialized = self._deserialize("FileStorageInfo", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - read_write.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}/readwrite" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_migration_services_operations.py new file mode 100644 index 000000000000..4e2be80c82cf --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_migration_services_operations.py @@ -0,0 +1,872 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ...operations._migration_services_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_resource_group_request, + build_list_by_subscription_request, + build_list_migrations_request, + build_update_request, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class MigrationServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.datamigration.aio.DataMigrationManagementClient`'s + :attr:`migration_services` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get( + self, resource_group_name: str, migration_service_name: str, **kwargs: Any + ) -> _models.MigrationService: + """Retrieve the Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :return: MigrationService or the result of cls(response) + :rtype: ~azure.mgmt.datamigration.models.MigrationService + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None) + + _request = build_get_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("MigrationService", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + migration_service_name: str, + parameters: Union[_models.MigrationService, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "MigrationService") + + _request = build_create_or_update_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + 
content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: _models.MigrationService, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MigrationService]: + """Create or Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Required. + :type parameters: ~azure.mgmt.datamigration.models.MigrationService + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MigrationService]: + """Create or Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: Union[_models.MigrationService, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.MigrationService]: + """Create or Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Is either a MigrationService type or a + IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.MigrationService or IO[bytes] + :return: An instance of AsyncLROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("MigrationService", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, 
AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.MigrationService].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.MigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _delete_initial( + self, resource_group_name: str, migration_service_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_delete_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete( + self, resource_group_name: str, migration_service_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. 
+ :type migration_service_name: str + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + migration_service_name: str, + parameters: Union[_models.MigrationServiceUpdate, IO[bytes]], + **kwargs: Any + ) 
-> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "MigrationServiceUpdate") + + _request = build_update_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = 
{} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: _models.MigrationServiceUpdate, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MigrationService]: + """Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Required. + :type parameters: ~azure.mgmt.datamigration.models.MigrationServiceUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.MigrationService]: + """Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. 
Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: Union[_models.MigrationServiceUpdate, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.MigrationService]: + """Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Is either a MigrationServiceUpdate + type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationServiceUpdate or IO[bytes] + :return: An instance of AsyncLROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("MigrationService", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.MigrationService].from_continuation_token( + polling_method=polling_method, + 
continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.MigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, **kwargs: Any + ) -> AsyncIterable["_models.MigrationService"]: + """Retrieve all migration services in the resource group. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :return: An iterator like instance of either MigrationService or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.MigrationServiceListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for 
v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("MigrationServiceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.MigrationService"]: + """Retrieve all migration services in the subscriptions. 
+ + :return: An iterator like instance of either MigrationService or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.MigrationServiceListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("MigrationServiceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def 
get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_migrations( + self, resource_group_name: str, migration_service_name: str, **kwargs: Any + ) -> AsyncIterable["_models.DatabaseMigrationBase"]: + """Retrieve the List of database migrations attached to the service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. 
+ :type migration_service_name: str + :return: An iterator like instance of either DatabaseMigrationBase or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationBaseListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_migrations_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationBaseListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + 
list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_operations.py index 92b231c62180..f933295d43d0 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -20,20 +20,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._operations import build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -61,7 +59,6 @@ def __init__(self, *args, **kwargs) -> None: def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationsDefinition"]: """Lists all of the available SQL Migration REST API operations. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationsDefinition or the result of cls(response) :rtype: @@ -71,12 +68,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationsDefinition"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -87,14 +82,12 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationsDefinition"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -106,13 +99,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("OperationListResult", 
pipeline_response) @@ -122,10 +114,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -136,5 +129,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list.metadata = {"url": "/providers/Microsoft.DataMigration/operations"} diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_projects_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_projects_operations.py index eae43bac1c2d..39b7e0d94e7c 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_projects_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_projects_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -20,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._projects_operations import ( build_create_or_update_request, build_delete_request, @@ -37,10 +36,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -75,7 +74,6 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterab :type group_name: str :param service_name: Name of the service. Required. 
:type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Project or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.Project] :raises ~azure.core.exceptions.HttpResponseError: @@ -83,12 +81,10 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterab _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -99,17 +95,15 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterab def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -121,13 +115,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - 
request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("ProjectList", pipeline_response) @@ -137,10 +130,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -153,10 +147,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects" - } - @overload async def create_or_update( self, @@ -184,7 +174,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: @@ -196,7 +185,7 @@ async def create_or_update( group_name: str, service_name: str, project_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -213,11 +202,10 @@ async def create_or_update( :param project_name: Name of the project. Required. :type project_name: str :param parameters: Information about the project. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: @@ -229,7 +217,7 @@ async def create_or_update( group_name: str, service_name: str, project_name: str, - parameters: Union[_models.Project, IO], + parameters: Union[_models.Project, IO[bytes]], **kwargs: Any ) -> _models.Project: """Create or update project. @@ -243,18 +231,14 @@ async def create_or_update( :type service_name: str :param project_name: Name of the project. Required. :type project_name: str - :param parameters: Information about the project. Is either a model type or a IO type. + :param parameters: Information about the project. Is either a Project type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.datamigration.models.Project or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :type parameters: ~azure.mgmt.datamigration.models.Project or IO[bytes] :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -265,21 +249,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Project] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Project") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -288,15 +270,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + 
_request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -306,21 +287,13 @@ async def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("Project", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("Project", pipeline_response) + deserialized = self._deserialize("Project", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}" - } - @distributed_trace_async async def get(self, group_name: str, service_name: str, project_name: str, **kwargs: Any) -> _models.Project: """Get project information. @@ -334,12 +307,11 @@ async def get(self, group_name: str, service_name: str, project_name: str, **kwa :type service_name: str :param project_name: Name of the project. Required. 
:type project_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -350,26 +322,23 @@ async def get(self, group_name: str, service_name: str, project_name: str, **kwa _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Project] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, service_name=service_name, project_name=project_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -379,16 +348,12 @@ async def get(self, group_name: str, service_name: str, project_name: str, **kwa error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Project", pipeline_response) + deserialized = self._deserialize("Project", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -413,12 +378,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :param delete_running_tasks: Delete the resource even if it contains running tasks. Default value is None. :type delete_running_tasks: bool - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -429,27 +393,24 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, project_name=project_name, subscription_id=self._config.subscription_id, delete_running_tasks=delete_running_tasks, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url 
= self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -460,11 +421,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}" - } + return cls(pipeline_response, None, {}) # type: ignore @overload async def update( @@ -493,7 +450,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: @@ -505,7 +461,7 @@ async def update( group_name: str, service_name: str, project_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -522,11 +478,10 @@ async def update( :param project_name: Name of the project. Required. :type project_name: str :param parameters: Information about the project. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: @@ -538,7 +493,7 @@ async def update( group_name: str, service_name: str, project_name: str, - parameters: Union[_models.Project, IO], + parameters: Union[_models.Project, IO[bytes]], **kwargs: Any ) -> _models.Project: """Update project. @@ -552,18 +507,14 @@ async def update( :type service_name: str :param project_name: Name of the project. Required. :type project_name: str - :param parameters: Information about the project. Is either a model type or a IO type. + :param parameters: Information about the project. Is either a Project type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.datamigration.models.Project or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :type parameters: ~azure.mgmt.datamigration.models.Project or IO[bytes] :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -574,21 +525,19 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Project] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Project") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -597,15 +546,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -615,13 +563,9 @@ async def update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Project", pipeline_response) + deserialized = self._deserialize("Project", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_resource_skus_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_resource_skus_operations.py index 9cafaf7e7f73..3a75b7a56b7b 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_resource_skus_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_resource_skus_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -20,20 +20,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._resource_skus_operations import build_list_skus_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -61,9 +59,8 @@ def __init__(self, *args, **kwargs) -> None: def list_skus(self, **kwargs: Any) -> AsyncIterable["_models.ResourceSku"]: """Get supported SKUs. - The skus action returns the list of SKUs that DMS supports. + The skus action returns the list of SKUs that DMS (classic) supports. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ResourceSku or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ResourceSku] :raises ~azure.core.exceptions.HttpResponseError: @@ -71,12 +68,10 @@ def list_skus(self, **kwargs: Any) -> AsyncIterable["_models.ResourceSku"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ResourceSkusResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -87,15 +82,13 @@ def list_skus(self, **kwargs: Any) -> AsyncIterable["_models.ResourceSku"]: def prepare_request(next_link=None): if not next_link: - request = build_list_skus_request( + _request = build_list_skus_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_skus.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -107,13 +100,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = 
self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("ResourceSkusResult", pipeline_response) @@ -123,10 +115,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -138,5 +131,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_skus.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/skus"} diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_service_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_service_tasks_operations.py index acf45d6008c8..20f5bebb6d6e 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_service_tasks_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_service_tasks_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -20,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._service_tasks_operations import ( build_cancel_request, build_create_or_update_request, @@ -38,10 +37,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -71,10 +70,10 @@ def list( ) -> AsyncIterable["_models.ProjectTask"]: """Get service level tasks for a service. - The services resource is the top-level resource that represents the Database Migration Service. - This method returns a list of service level tasks owned by a service resource. 
Some tasks may - have a status of Unknown, which indicates that an error occurred while querying the status of - that task. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This method returns a list of service level tasks owned by a service + resource. Some tasks may have a status of Unknown, which indicates that an error occurred while + querying the status of that task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -82,7 +81,6 @@ def list( :type service_name: str :param task_type: Filter tasks by task type. Default value is None. :type task_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ProjectTask or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ProjectTask] :raises ~azure.core.exceptions.HttpResponseError: @@ -90,12 +88,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TaskList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -106,18 +102,16 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, task_type=task_type, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - 
request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -129,13 +123,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("TaskList", pipeline_response) @@ -145,10 +138,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -161,10 +155,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks" - } - @overload async def create_or_update( self, @@ -179,9 +169,9 @@ async def create_or_update( """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PUT method creates a new service task or updates an existing one, although - since service tasks have no mutable custom properties, there is little reason to update an - existing one. + DMS (classic) instance. 
The PUT method creates a new service task or updates an existing one, + although since service tasks have no mutable custom properties, there is little reason to + update an existing one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -194,7 +184,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -206,7 +195,7 @@ async def create_or_update( group_name: str, service_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -214,9 +203,9 @@ async def create_or_update( """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PUT method creates a new service task or updates an existing one, although - since service tasks have no mutable custom properties, there is little reason to update an - existing one. + DMS (classic) instance. The PUT method creates a new service task or updates an existing one, + although since service tasks have no mutable custom properties, there is little reason to + update an existing one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -225,11 +214,10 @@ async def create_or_update( :param task_name: Name of the Task. Required. :type task_name: str :param parameters: Information about the task. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -241,15 +229,15 @@ async def create_or_update( group_name: str, service_name: str, task_name: str, - parameters: Union[_models.ProjectTask, IO], + parameters: Union[_models.ProjectTask, IO[bytes]], **kwargs: Any ) -> _models.ProjectTask: """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PUT method creates a new service task or updates an existing one, although - since service tasks have no mutable custom properties, there is little reason to update an - existing one. + DMS (classic) instance. The PUT method creates a new service task or updates an existing one, + although since service tasks have no mutable custom properties, there is little reason to + update an existing one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -257,17 +245,14 @@ async def create_or_update( :type service_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Information about the task. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes] + type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes] :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -278,21 +263,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectTask") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, service_name=service_name, task_name=task_name, @@ -301,15 +284,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -319,21 +301,13 @@ async def 
create_or_update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ProjectTask", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}" - } - @distributed_trace_async async def get( self, group_name: str, service_name: str, task_name: str, expand: Optional[str] = None, **kwargs: Any @@ -341,7 +315,7 @@ async def get( """Get service task information. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The GET method retrieves information about a service task. + DMS (classic) instance. The GET method retrieves information about a service task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -351,12 +325,11 @@ async def get( :type task_name: str :param expand: Expand the response. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -367,27 +340,24 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, service_name=service_name, task_name=task_name, subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -397,16 +367,12 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + 
return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -420,7 +386,8 @@ async def delete( # pylint: disable=inconsistent-return-statements """Delete service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The DELETE method deletes a service task, canceling it first if it's running. + DMS (classic) instance. The DELETE method deletes a service task, canceling it first if it's + running. :param group_name: Name of the resource group. Required. :type group_name: str @@ -431,12 +398,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :param delete_running_tasks: Delete the resource even if it contains running tasks. Default value is None. 
:type delete_running_tasks: bool - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -447,27 +413,24 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, task_name=task_name, subscription_id=self._config.subscription_id, delete_running_tasks=delete_running_tasks, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -478,11 +441,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}" - } + 
return cls(pipeline_response, None, {}) # type: ignore @overload async def update( @@ -498,8 +457,8 @@ async def update( """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PATCH method updates an existing service task, but since service tasks have - no mutable custom properties, there is little reason to do so. + DMS (classic) instance. The PATCH method updates an existing service task, but since service + tasks have no mutable custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -512,7 +471,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -524,7 +482,7 @@ async def update( group_name: str, service_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -532,8 +490,8 @@ async def update( """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PATCH method updates an existing service task, but since service tasks have - no mutable custom properties, there is little reason to do so. + DMS (classic) instance. The PATCH method updates an existing service task, but since service + tasks have no mutable custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -542,11 +500,10 @@ async def update( :param task_name: Name of the Task. Required. 
:type task_name: str :param parameters: Information about the task. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -558,14 +515,14 @@ async def update( group_name: str, service_name: str, task_name: str, - parameters: Union[_models.ProjectTask, IO], + parameters: Union[_models.ProjectTask, IO[bytes]], **kwargs: Any ) -> _models.ProjectTask: """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PATCH method updates an existing service task, but since service tasks have - no mutable custom properties, there is little reason to do so. + DMS (classic) instance. The PATCH method updates an existing service task, but since service + tasks have no mutable custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -573,17 +530,14 @@ async def update( :type service_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Information about the task. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes] + type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes] :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -594,21 +548,19 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectTask") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, task_name=task_name, @@ -617,15 +569,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -635,23 +586,19 @@ async def update( error = 
self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}" - } + return deserialized # type: ignore @distributed_trace_async async def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: Any) -> _models.ProjectTask: """Cancel a service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. This method cancels a service task if it's currently queued or running. + DMS (classic) instance. This method cancels a service task if it's currently queued or running. :param group_name: Name of the resource group. Required. :type group_name: str @@ -659,12 +606,11 @@ async def cancel(self, group_name: str, service_name: str, task_name: str, **kwa :type service_name: str :param task_name: Name of the Task. Required. 
:type task_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -675,26 +621,23 @@ async def cancel(self, group_name: str, service_name: str, task_name: str, **kwa _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) - request = build_cancel_request( + _request = build_cancel_request( group_name=group_name, service_name=service_name, task_name=task_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.cancel.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -704,13 +647,9 @@ async def cancel(self, group_name: str, service_name: str, task_name: str, **kwa error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = 
self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}/cancel" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_services_operations.py index 1b8569459c42..e86c102f1c2d 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_services_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_services_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -17,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -30,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._services_operations import ( build_check_children_name_availability_request, build_check_name_availability_request, @@ -46,10 +47,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -74,9 +75,13 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") async def _create_or_update_initial( - self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any - ) -> Optional[_models.DataMigrationService]: - error_map = { + self, + group_name: str, + service_name: str, + parameters: Union[_models.DataMigrationService, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -87,21 +92,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.DataMigrationService]] = kwargs.pop("cls", 
None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DataMigrationService") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, @@ -109,39 +112,34 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("DataMigrationService", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - 
return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return deserialized # type: ignore @overload async def begin_create_or_update( @@ -153,16 +151,16 @@ async def begin_create_or_update( content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[_models.DataMigrationService]: - """Create or update DMS Instance. + """Create or update DMS (classic) Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PUT method creates a new service or updates an existing one. When a service is updated, - existing child resources (i.e. tasks) are unaffected. Services currently support a single kind, - "vm", which refers to a VM-based service, although other kinds may be added in the future. This - method can change the kind, SKU, and network of the service, but if tasks are currently running - (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider - will reply when successful with 200 OK or 201 Created. Long-running operations use the - provisioningState property. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PUT method creates a new service or updates an existing one. When a + service is updated, existing child resources (i.e. tasks) are unaffected. Services currently + support a single kind, "vm", which refers to a VM-based service, although other kinds may be + added in the future. This method can change the kind, SKU, and network of the service, but if + tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request + ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created. 
+ Long-running operations use the provisioningState property. :param group_name: Name of the resource group. Required. :type group_name: str @@ -173,14 +171,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DataMigrationService or the result of cls(response) :rtype: @@ -193,39 +183,31 @@ async def begin_create_or_update( self, group_name: str, service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[_models.DataMigrationService]: - """Create or update DMS Instance. + """Create or update DMS (classic) Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PUT method creates a new service or updates an existing one. When a service is updated, - existing child resources (i.e. tasks) are unaffected. Services currently support a single kind, - "vm", which refers to a VM-based service, although other kinds may be added in the future. This - method can change the kind, SKU, and network of the service, but if tasks are currently running - (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). 
The provider - will reply when successful with 200 OK or 201 Created. Long-running operations use the - provisioningState property. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PUT method creates a new service or updates an existing one. When a + service is updated, existing child resources (i.e. tasks) are unaffected. Services currently + support a single kind, "vm", which refers to a VM-based service, although other kinds may be + added in the future. This method can change the kind, SKU, and network of the service, but if + tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request + ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created. + Long-running operations use the provisioningState property. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str :param parameters: Information about the service. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either DataMigrationService or the result of cls(response) :rtype: @@ -235,37 +217,30 @@ async def begin_create_or_update( @distributed_trace_async async def begin_create_or_update( - self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any + self, + group_name: str, + service_name: str, + parameters: Union[_models.DataMigrationService, IO[bytes]], + **kwargs: Any ) -> AsyncLROPoller[_models.DataMigrationService]: - """Create or update DMS Instance. + """Create or update DMS (classic) Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PUT method creates a new service or updates an existing one. When a service is updated, - existing child resources (i.e. tasks) are unaffected. Services currently support a single kind, - "vm", which refers to a VM-based service, although other kinds may be added in the future. This - method can change the kind, SKU, and network of the service, but if tasks are currently running - (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider - will reply when successful with 200 OK or 201 Created. Long-running operations use the - provisioningState property. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PUT method creates a new service or updates an existing one. When a + service is updated, existing child resources (i.e. tasks) are unaffected. Services currently + support a single kind, "vm", which refers to a VM-based service, although other kinds may be + added in the future. This method can change the kind, SKU, and network of the service, but if + tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request + ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created. 
+ Long-running operations use the provisioningState property. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :param parameters: Information about the service. Is either a model type or a IO type. - Required. - :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Information about the service. Is either a DataMigrationService type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO[bytes] :return: An instance of AsyncLROPoller that returns either DataMigrationService or the result of cls(response) :rtype: @@ -275,9 +250,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -295,12 +268,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -310,35 +284,32 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.DataMigrationService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return AsyncLROPoller[_models.DataMigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace_async async def get(self, group_name: str, service_name: str, **kwargs: Any) -> _models.DataMigrationService: - """Get DMS Service Instance. + """Get DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The GET method retrieves information about a service instance. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The GET method retrieves information about a service instance. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataMigrationService or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DataMigrationService :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -349,25 +320,22 @@ async def get(self, group_name: str, service_name: str, **kwargs: Any) -> _model _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, 
service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -377,21 +345,17 @@ async def get(self, group_name: str, service_name: str, **kwargs: Any) -> _model error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return deserialized # type: ignore - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, group_name: str, service_name: str, delete_running_tasks: Optional[bool] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -402,50 +366,52 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: 
Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, delete_running_tasks=delete_running_tasks, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete( self, group_name: str, 
service_name: str, delete_running_tasks: Optional[bool] = None, **kwargs: Any ) -> AsyncLROPoller[None]: - """Delete DMS Service Instance. + """Delete DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The DELETE method deletes a service. Any running tasks will be canceled. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The DELETE method deletes a service. Any running tasks will be canceled. :param group_name: Name of the resource group. Required. :type group_name: str @@ -454,14 +420,6 @@ async def begin_delete( :param delete_running_tasks: Delete the resource even if it contains running tasks. Default value is None. :type delete_running_tasks: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -469,15 +427,13 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( group_name=group_name, service_name=service_name, delete_running_tasks=delete_running_tasks, @@ -487,11 +443,12 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -500,22 +457,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, 
polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore async def _update_initial( - self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any - ) -> Optional[_models.DataMigrationService]: - error_map = { + self, + group_name: str, + service_name: str, + parameters: Union[_models.DataMigrationService, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -526,21 +483,19 @@ async def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.DataMigrationService]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DataMigrationService") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, @@ -548,36 +503,34 @@ async def _update_initial( content_type=content_type, json=_json, content=_content, - 
template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return deserialized # type: ignore @overload async def begin_update( @@ -589,12 +542,12 @@ async def begin_update( content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[_models.DataMigrationService]: - """Create or update DMS Service Instance. + """Create or update DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PATCH method updates an existing service. 
This method can change the kind, SKU, and network - of the service, but if tasks are currently running (i.e. the service is busy), this will fail - with 400 Bad Request ("ServiceIsBusy"). + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PATCH method updates an existing service. This method can change the + kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is + busy), this will fail with 400 Bad Request ("ServiceIsBusy"). :param group_name: Name of the resource group. Required. :type group_name: str @@ -605,14 +558,6 @@ async def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DataMigrationService or the result of cls(response) :rtype: @@ -625,35 +570,27 @@ async def begin_update( self, group_name: str, service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[_models.DataMigrationService]: - """Create or update DMS Service Instance. + """Create or update DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. 
- The PATCH method updates an existing service. This method can change the kind, SKU, and network - of the service, but if tasks are currently running (i.e. the service is busy), this will fail - with 400 Bad Request ("ServiceIsBusy"). + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PATCH method updates an existing service. This method can change the + kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is + busy), this will fail with 400 Bad Request ("ServiceIsBusy"). :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str :param parameters: Information about the service. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either DataMigrationService or the result of cls(response) :rtype: @@ -663,33 +600,26 @@ async def begin_update( @distributed_trace_async async def begin_update( - self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any + self, + group_name: str, + service_name: str, + parameters: Union[_models.DataMigrationService, IO[bytes]], + **kwargs: Any ) -> AsyncLROPoller[_models.DataMigrationService]: - """Create or update DMS Service Instance. + """Create or update DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PATCH method updates an existing service. This method can change the kind, SKU, and network - of the service, but if tasks are currently running (i.e. the service is busy), this will fail - with 400 Bad Request ("ServiceIsBusy"). + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PATCH method updates an existing service. This method can change the + kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is + busy), this will fail with 400 Bad Request ("ServiceIsBusy"). :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :param parameters: Information about the service. Is either a model type or a IO type. - Required. - :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Information about the service. Is either a DataMigrationService type or a + IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO[bytes] :return: An instance of AsyncLROPoller that returns either DataMigrationService or the result of cls(response) :rtype: @@ -699,9 +629,7 @@ async def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -719,12 +647,13 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -734,17 +663,15 @@ def get_long_running_output(pipeline_response): else: 
polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.DataMigrationService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return AsyncLROPoller[_models.DataMigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace_async async def check_status( @@ -752,20 +679,19 @@ async def check_status( ) -> _models.DataMigrationServiceStatusResponse: """Check service health status. - The services resource is the top-level resource that represents the Database Migration Service. - This action performs a health check and returns the status of the service and virtual machine - size. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This action performs a health check and returns the status of the service + and virtual machine size. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. 
:type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataMigrationServiceStatusResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DataMigrationServiceStatusResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -776,25 +702,22 @@ async def check_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataMigrationServiceStatusResponse] = kwargs.pop("cls", None) - request = build_check_status_request( + _request = build_check_status_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.check_status.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -804,21 +727,15 @@ async def check_status( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataMigrationServiceStatusResponse", pipeline_response) + deserialized = 
self._deserialize("DataMigrationServiceStatusResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - check_status.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkStatus" - } + return deserialized # type: ignore - async def _start_initial( # pylint: disable=inconsistent-return-statements - self, group_name: str, service_name: str, **kwargs: Any - ) -> None: - error_map = { + async def _start_initial(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -829,60 +746,55 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_start_request( + _request = build_start_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._start_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _start_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start" - } + return deserialized # type: ignore @distributed_trace_async async def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncLROPoller[None]: """Start service. - The services resource is the top-level resource that represents the Database Migration Service. - This action starts the service and the service can be used for data migration. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This action starts the service and the service can be used for data + migration. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -890,15 +802,13 @@ async def begin_start(self, group_name: str, service_name: str, **kwargs: Any) - _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._start_initial( # type: ignore + raw_result = await self._start_initial( group_name=group_name, service_name=service_name, api_version=api_version, @@ -907,11 +817,12 @@ async def begin_start(self, group_name: str, service_name: str, **kwargs: Any) - params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -920,22 +831,16 @@ def 
get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_start.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _stop_initial( # pylint: disable=inconsistent-return-statements - self, group_name: str, service_name: str, **kwargs: Any - ) -> None: - error_map = { + async def _stop_initial(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -946,61 +851,55 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_stop_request( + _request = build_stop_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata["url"], headers=_headers, params=_params, ) - request = 
_convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _stop_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop" - } + return deserialized # type: ignore @distributed_trace_async async def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> AsyncLROPoller[None]: """Stop service. - The services resource is the top-level resource that represents the Database Migration Service. - This action stops the service and the service cannot be used for data migration. The service - owner won't be billed when the service is stopped. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This action stops the service and the service cannot be used for data + migration. The service owner won't be billed when the service is stopped. :param group_name: Name of the resource group. Required. 
:type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1008,15 +907,13 @@ async def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._stop_initial( # type: ignore + raw_result = await self._stop_initial( group_name=group_name, service_name=service_name, api_version=api_version, @@ -1025,11 +922,12 @@ async def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) 
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -1038,17 +936,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_stop.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def list_skus( @@ -1056,14 +950,13 @@ def list_skus( ) -> AsyncIterable["_models.AvailableServiceSku"]: """Get compatible SKUs. - The services resource is the top-level resource that represents the Database Migration Service. - The skus action returns the list of SKUs that a service resource can be updated to. + The services resource is the top-level resource that represents the Database Migration Service + (classic). The skus action returns the list of SKUs that a service resource can be updated to. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. 
:type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AvailableServiceSku or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.AvailableServiceSku] @@ -1072,12 +965,10 @@ def list_skus( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ServiceSkuList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1088,17 +979,15 @@ def list_skus( def prepare_request(next_link=None): if not next_link: - request = build_list_skus_request( + _request = build_list_skus_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_skus.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1110,13 +999,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return 
_request async def extract_data(pipeline_response): deserialized = self._deserialize("ServiceSkuList", pipeline_response) @@ -1126,10 +1014,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1142,10 +1031,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_skus.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/skus" - } - @overload async def check_children_name_availability( self, @@ -1169,7 +1054,6 @@ async def check_children_name_availability( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -1180,7 +1064,7 @@ async def check_children_name_availability( self, group_name: str, service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -1194,11 +1078,10 @@ async def check_children_name_availability( :param service_name: Name of the service. Required. :type service_name: str :param parameters: Requested name to validate. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -1206,7 +1089,11 @@ async def check_children_name_availability( @distributed_trace_async async def check_children_name_availability( - self, group_name: str, service_name: str, parameters: Union[_models.NameAvailabilityRequest, IO], **kwargs: Any + self, + group_name: str, + service_name: str, + parameters: Union[_models.NameAvailabilityRequest, IO[bytes]], + **kwargs: Any ) -> _models.NameAvailabilityResponse: """Check nested resource name validity and availability. @@ -1216,17 +1103,14 @@ async def check_children_name_availability( :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :param parameters: Requested name to validate. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Requested name to validate. Is either a NameAvailabilityRequest type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO[bytes] :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1237,21 +1121,19 @@ async def check_children_name_availability( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.NameAvailabilityResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "NameAvailabilityRequest") - request = build_check_children_name_availability_request( + _request = build_check_children_name_availability_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, @@ -1259,15 +1141,14 @@ async def check_children_name_availability( content_type=content_type, json=_json, content=_content, - template_url=self.check_children_name_availability.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1277,27 +1158,22 @@ async def check_children_name_availability( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response) + deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - check_children_name_availability.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkNameAvailability" - } + return deserialized # type: ignore @distributed_trace def list_by_resource_group(self, group_name: str, **kwargs: Any) -> AsyncIterable["_models.DataMigrationService"]: """Get services in resource group. - The Services resource is the top-level resource that represents the Database Migration Service. - This method returns a list of service resources in a resource group. + The Services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This method returns a list of service resources in a resource group. :param group_name: Name of the resource group. Required. 
:type group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataMigrationService or the result of cls(response) :rtype: @@ -1307,12 +1183,10 @@ def list_by_resource_group(self, group_name: str, **kwargs: Any) -> AsyncIterabl _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataMigrationServiceList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1323,16 +1197,14 @@ def list_by_resource_group(self, group_name: str, **kwargs: Any) -> AsyncIterabl def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( group_name=group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1344,13 +1216,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = 
self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("DataMigrationServiceList", pipeline_response) @@ -1360,10 +1231,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1376,18 +1248,13 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services" - } - @distributed_trace def list(self, **kwargs: Any) -> AsyncIterable["_models.DataMigrationService"]: """Get services in subscription. - The services resource is the top-level resource that represents the Database Migration Service. - This method returns a list of service resources in a subscription. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This method returns a list of service resources in a subscription. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataMigrationService or the result of cls(response) :rtype: @@ -1397,12 +1264,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.DataMigrationService"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataMigrationServiceList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1413,15 +1278,13 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.DataMigrationService"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1433,13 +1296,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def 
extract_data(pipeline_response): deserialized = self._deserialize("DataMigrationServiceList", pipeline_response) @@ -1449,10 +1311,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1465,8 +1328,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/services"} - @overload async def check_name_availability( self, @@ -1487,7 +1348,6 @@ async def check_name_availability( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -1495,7 +1355,7 @@ async def check_name_availability( @overload async def check_name_availability( - self, location: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any + self, location: str, parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.NameAvailabilityResponse: """Check name validity and availability. @@ -1504,11 +1364,10 @@ async def check_name_availability( :param location: The Azure region of the operation. Required. :type location: str :param parameters: Requested name to validate. Required. 
- :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -1516,7 +1375,7 @@ async def check_name_availability( @distributed_trace_async async def check_name_availability( - self, location: str, parameters: Union[_models.NameAvailabilityRequest, IO], **kwargs: Any + self, location: str, parameters: Union[_models.NameAvailabilityRequest, IO[bytes]], **kwargs: Any ) -> _models.NameAvailabilityResponse: """Check name validity and availability. @@ -1524,17 +1383,14 @@ async def check_name_availability( :param location: The Azure region of the operation. Required. :type location: str - :param parameters: Requested name to validate. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Requested name to validate. Is either a NameAvailabilityRequest type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO[bytes] :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1545,36 +1401,33 @@ async def check_name_availability( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.NameAvailabilityResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "NameAvailabilityRequest") - request = build_check_name_availability_request( + _request = build_check_name_availability_request( location=location, subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, content=_content, - template_url=self.check_name_availability.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -1584,13 +1437,9 @@ async def check_name_availability( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response) + deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - check_name_availability.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/checkNameAvailability" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_sql_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_sql_migration_services_operations.py index 179add632186..37209b8d8db6 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_sql_migration_services_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_sql_migration_services_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -17,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -30,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._sql_migration_services_operations import ( build_create_or_update_request, build_delete_node_request, @@ -45,10 +46,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -83,12 +84,11 @@ async def get( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SqlMigrationService or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.SqlMigrationService :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -99,25 +99,22 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, 
api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -126,25 +123,21 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return deserialized # type: ignore async def _create_or_update_initial( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.SqlMigrationService, IO], + parameters: Union[_models.SqlMigrationService, IO[bytes]], **kwargs: Any - ) -> _models.SqlMigrationService: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -155,21 +148,19 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "SqlMigrationService") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, @@ -177,38 +168,34 @@ async def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("SqlMigrationService", pipeline_response) - - if response.status_code == 201: - deserialized = 
self._deserialize("SqlMigrationService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } - @overload async def begin_create_or_update( self, @@ -231,14 +218,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: @@ -251,7 +230,7 @@ async def begin_create_or_update( self, resource_group_name: str, sql_migration_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -264,18 +243,10 @@ async def begin_create_or_update( :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str :param parameters: Details of SqlMigrationService resource. Required. 
- :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: @@ -288,7 +259,7 @@ async def begin_create_or_update( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.SqlMigrationService, IO], + parameters: Union[_models.SqlMigrationService, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.SqlMigrationService]: """Create or Update Database Migration Service. @@ -298,20 +269,9 @@ async def begin_create_or_update( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationService or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of SqlMigrationService resource. Is either a SqlMigrationService + type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationService or IO[bytes] :return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: @@ -321,9 +281,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -341,12 +299,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -356,22 +315,20 @@ def 
get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.SqlMigrationService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return AsyncLROPoller[_models.SqlMigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -382,39 +339,41 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - 
template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -427,14 +386,6 @@ async def begin_delete( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -442,15 +393,13 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, api_version=api_version, @@ -459,11 +408,12 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -472,26 +422,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return 
AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore async def _update_initial( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.SqlMigrationServiceUpdate, IO], + parameters: Union[_models.SqlMigrationServiceUpdate, IO[bytes]], **kwargs: Any - ) -> _models.SqlMigrationService: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -502,21 +448,19 @@ async def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "SqlMigrationServiceUpdate") - request = build_update_request( + 
_request = build_update_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, @@ -524,38 +468,34 @@ async def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("SqlMigrationService", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } - @overload async def begin_update( self, @@ -578,14 +518,6 @@ async def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: @@ -598,7 +530,7 @@ async def begin_update( self, resource_group_name: str, sql_migration_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -611,18 +543,10 @@ async def begin_update( :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: @@ -635,7 +559,7 @@ async def begin_update( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.SqlMigrationServiceUpdate, IO], + parameters: Union[_models.SqlMigrationServiceUpdate, IO[bytes]], **kwargs: Any ) -> AsyncLROPoller[_models.SqlMigrationService]: """Update Database Migration Service. @@ -645,20 +569,9 @@ async def begin_update( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationServiceUpdate or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of SqlMigrationService resource. Is either a + SqlMigrationServiceUpdate type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationServiceUpdate or IO[bytes] :return: An instance of AsyncLROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: @@ -668,9 +581,7 @@ async def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -688,12 +599,13 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -703,17 +615,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.SqlMigrationService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return AsyncLROPoller[_models.SqlMigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace def list_by_resource_group( @@ -724,7 +634,6 @@ def list_by_resource_group( :param resource_group_name: Name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. Required. :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.SqlMigrationService] @@ -733,12 +642,10 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SqlMigrationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -749,16 +656,14 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -770,13 +675,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("SqlMigrationListResult", pipeline_response) @@ -786,10 +690,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -801,10 +706,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices" - } - @distributed_trace_async async def list_auth_keys( self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any @@ -816,12 +717,11 @@ async def list_auth_keys( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. 
:type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: AuthenticationKeys or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.AuthenticationKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -832,25 +732,22 @@ async def list_auth_keys( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.AuthenticationKeys] = kwargs.pop("cls", None) - request = build_list_auth_keys_request( + _request = build_list_auth_keys_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_auth_keys.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -859,16 +756,12 @@ async def list_auth_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AuthenticationKeys", pipeline_response) + deserialized = self._deserialize("AuthenticationKeys", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - list_auth_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listAuthKeys" - } + return deserialized # type: ignore @overload async def regenerate_auth_keys( @@ -892,7 +785,6 @@ async def regenerate_auth_keys( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: RegenAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys :raises ~azure.core.exceptions.HttpResponseError: @@ -903,7 +795,7 @@ async def regenerate_auth_keys( self, resource_group_name: str, sql_migration_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -916,11 +808,10 @@ async def regenerate_auth_keys( :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: RegenAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys :raises ~azure.core.exceptions.HttpResponseError: @@ -931,7 +822,7 @@ async def regenerate_auth_keys( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.RegenAuthKeys, IO], + parameters: Union[_models.RegenAuthKeys, IO[bytes]], **kwargs: Any ) -> _models.RegenAuthKeys: """Regenerate a new set of Authentication Keys for Self Hosted Integration Runtime. @@ -941,18 +832,14 @@ async def regenerate_auth_keys( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.RegenAuthKeys or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Details of SqlMigrationService resource. Is either a RegenAuthKeys type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.RegenAuthKeys or IO[bytes] :return: RegenAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -963,21 +850,19 @@ async def regenerate_auth_keys( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.RegenAuthKeys] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "RegenAuthKeys") - request = build_regenerate_auth_keys_request( + _request = build_regenerate_auth_keys_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, @@ -985,15 +870,14 @@ async def regenerate_auth_keys( content_type=content_type, json=_json, content=_content, - template_url=self.regenerate_auth_keys.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, 
stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1002,16 +886,12 @@ async def regenerate_auth_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("RegenAuthKeys", pipeline_response) + deserialized = self._deserialize("RegenAuthKeys", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - regenerate_auth_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/regenerateAuthKeys" - } + return deserialized # type: ignore @overload async def delete_node( @@ -1035,7 +915,6 @@ async def delete_node( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DeleteNode or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DeleteNode :raises ~azure.core.exceptions.HttpResponseError: @@ -1046,7 +925,7 @@ async def delete_node( self, resource_group_name: str, sql_migration_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -1059,11 +938,10 @@ async def delete_node( :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DeleteNode or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DeleteNode :raises ~azure.core.exceptions.HttpResponseError: @@ -1074,7 +952,7 @@ async def delete_node( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.DeleteNode, IO], + parameters: Union[_models.DeleteNode, IO[bytes]], **kwargs: Any ) -> _models.DeleteNode: """Delete the integration runtime node. @@ -1084,18 +962,14 @@ async def delete_node( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.DeleteNode or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Details of SqlMigrationService resource. Is either a DeleteNode type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DeleteNode or IO[bytes] :return: DeleteNode or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DeleteNode :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1106,21 +980,19 @@ async def delete_node( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DeleteNode] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DeleteNode") - request = build_delete_node_request( + _request = build_delete_node_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, @@ -1128,15 +1000,14 @@ async def delete_node( content_type=content_type, json=_json, content=_content, - template_url=self.delete_node.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -1145,16 +1016,12 @@ async def delete_node( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DeleteNode", pipeline_response) + deserialized = self._deserialize("DeleteNode", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - delete_node.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/deleteNode" - } + return deserialized # type: ignore @distributed_trace def list_migrations( @@ -1167,7 +1034,6 @@ def list_migrations( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DatabaseMigration or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.DatabaseMigration] @@ -1176,12 +1042,10 @@ def list_migrations( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatabaseMigrationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1192,17 +1056,15 @@ def list_migrations( def prepare_request(next_link=None): if 
not next_link: - request = build_list_migrations_request( + _request = build_list_migrations_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_migrations.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1214,13 +1076,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("DatabaseMigrationListResult", pipeline_response) @@ -1230,10 +1091,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1245,10 +1107,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_migrations.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listMigrations" - } - 
@distributed_trace_async async def list_monitoring_data( self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any @@ -1261,12 +1119,11 @@ async def list_monitoring_data( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeMonitoringData or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.IntegrationRuntimeMonitoringData :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1277,25 +1134,22 @@ async def list_monitoring_data( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeMonitoringData] = kwargs.pop("cls", None) - request = build_list_monitoring_data_request( + _request = build_list_monitoring_data_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_monitoring_data.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -1304,22 +1158,17 @@ async def list_monitoring_data( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - list_monitoring_data.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listMonitoringData" - } + return deserialized # type: ignore @distributed_trace def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.SqlMigrationService"]: """Retrieve all SQL migration services in the subscriptions. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.SqlMigrationService] @@ -1328,12 +1177,10 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.SqlMigra _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SqlMigrationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1344,15 +1191,13 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.SqlMigra def prepare_request(next_link=None): if not next_link: - request = build_list_by_subscription_request( + _request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_subscription.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1364,13 +1209,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return 
request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("SqlMigrationListResult", pipeline_response) @@ -1380,10 +1224,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1394,7 +1239,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_by_subscription.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/sqlMigrationServices" - } diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_tasks_operations.py index fbe2cf4e36a3..8a18020b5fd2 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_tasks_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_tasks_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -20,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._tasks_operations import ( build_cancel_request, build_command_request, @@ -39,10 +38,10 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -72,9 +71,10 @@ def list( ) -> AsyncIterable["_models.ProjectTask"]: """Get tasks in a service. - The services resource is the top-level resource that represents the Database Migration Service. - This method returns a list of tasks owned by a service resource. Some tasks may have a status - of Unknown, which indicates that an error occurred while querying the status of that task. 
+ The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This method returns a list of tasks owned by a service resource. Some tasks + may have a status of Unknown, which indicates that an error occurred while querying the status + of that task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -84,7 +84,6 @@ def list( :type project_name: str :param task_type: Filter tasks by task type. Default value is None. :type task_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ProjectTask or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.ProjectTask] :raises ~azure.core.exceptions.HttpResponseError: @@ -92,12 +91,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TaskList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -108,19 +105,17 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( group_name=group_name, service_name=service_name, project_name=project_name, subscription_id=self._config.subscription_id, task_type=task_type, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make 
call to next link with the client's api-version @@ -132,13 +127,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("TaskList", pipeline_response) @@ -148,10 +142,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -164,10 +159,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks" - } - @overload async def create_or_update( self, @@ -183,8 +174,9 @@ async def create_or_update( """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PUT method creates a new task or updates an existing one, although since tasks - have no mutable custom properties, there is little reason to update an existing one. + (classic) instance. 
The PUT method creates a new task or updates an existing one, although + since tasks have no mutable custom properties, there is little reason to update an existing + one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -199,7 +191,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -212,7 +203,7 @@ async def create_or_update( service_name: str, project_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -220,8 +211,9 @@ async def create_or_update( """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PUT method creates a new task or updates an existing one, although since tasks - have no mutable custom properties, there is little reason to update an existing one. + (classic) instance. The PUT method creates a new task or updates an existing one, although + since tasks have no mutable custom properties, there is little reason to update an existing + one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -232,11 +224,10 @@ async def create_or_update( :param task_name: Name of the Task. Required. :type task_name: str :param parameters: Information about the task. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -249,14 +240,15 @@ async def create_or_update( service_name: str, project_name: str, task_name: str, - parameters: Union[_models.ProjectTask, IO], + parameters: Union[_models.ProjectTask, IO[bytes]], **kwargs: Any ) -> _models.ProjectTask: """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PUT method creates a new task or updates an existing one, although since tasks - have no mutable custom properties, there is little reason to update an existing one. + (classic) instance. The PUT method creates a new task or updates an existing one, although + since tasks have no mutable custom properties, there is little reason to update an existing + one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -266,17 +258,14 @@ async def create_or_update( :type project_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Information about the task. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes] + type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes] :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -287,21 +276,19 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectTask") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -311,15 +298,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -329,21 +315,13 @@ async 
def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ProjectTask", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}" - } - @distributed_trace_async async def get( self, @@ -357,7 +335,7 @@ async def get( """Get task information. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The GET method retrieves information about a task. + (classic) instance. The GET method retrieves information about a task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -369,12 +347,11 @@ async def get( :type task_name: str :param expand: Expand the response. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -385,12 +362,10 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -398,15 +373,14 @@ async def get( subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -416,16 +390,12 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, 
deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -440,7 +410,7 @@ async def delete( # pylint: disable=inconsistent-return-statements """Delete task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The DELETE method deletes a task, canceling it first if it's running. + (classic) instance. The DELETE method deletes a task, canceling it first if it's running. :param group_name: Name of the resource group. Required. :type group_name: str @@ -453,12 +423,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :param delete_running_tasks: Delete the resource even if it contains running tasks. Default value is None. 
:type delete_running_tasks: bool - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -469,12 +438,10 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -482,15 +449,14 @@ async def delete( # pylint: disable=inconsistent-return-statements subscription_id=self._config.subscription_id, delete_running_tasks=delete_running_tasks, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -501,11 +467,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}" - } + return cls(pipeline_response, None, {}) # type: ignore @overload async def update( @@ -522,8 +484,8 @@ async def update( """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PATCH method updates an existing task, but since tasks have no mutable custom - properties, there is little reason to do so. + (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable + custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -538,7 +500,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -551,7 +512,7 @@ async def update( service_name: str, project_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -559,8 +520,8 @@ async def update( """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PATCH method updates an existing task, but since tasks have no mutable custom - properties, there is little reason to do so. + (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable + custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. 
:type group_name: str @@ -571,11 +532,10 @@ async def update( :param task_name: Name of the Task. Required. :type task_name: str :param parameters: Information about the task. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -588,14 +548,14 @@ async def update( service_name: str, project_name: str, task_name: str, - parameters: Union[_models.ProjectTask, IO], + parameters: Union[_models.ProjectTask, IO[bytes]], **kwargs: Any ) -> _models.ProjectTask: """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PATCH method updates an existing task, but since tasks have no mutable custom - properties, there is little reason to do so. + (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable + custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -605,17 +565,14 @@ async def update( :type project_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Information about the task. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes] + type. 
Required. + :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes] :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -626,21 +583,19 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectTask") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -650,15 +605,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -668,16 +622,12 @@ async def update( error = 
self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}" - } + return deserialized # type: ignore @distributed_trace_async async def cancel( @@ -686,7 +636,7 @@ async def cancel( """Cancel a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. This method cancels a task if it's currently queued or running. + (classic) instance. This method cancels a task if it's currently queued or running. :param group_name: Name of the resource group. Required. :type group_name: str @@ -696,12 +646,11 @@ async def cancel( :type project_name: str :param task_name: Name of the Task. Required. 
:type task_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -712,27 +661,24 @@ async def cancel( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) - request = build_cancel_request( + _request = build_cancel_request( group_name=group_name, service_name=service_name, project_name=project_name, task_name=task_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.cancel.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -742,16 +688,12 @@ async def cancel( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) 
- - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/cancel" - } + return deserialized # type: ignore @overload async def command( @@ -768,7 +710,7 @@ async def command( """Execute a command on a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. This method executes a command on a running task. + (classic) instance. This method executes a command on a running task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -783,7 +725,6 @@ async def command( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: CommandProperties or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.CommandProperties :raises ~azure.core.exceptions.HttpResponseError: @@ -796,7 +737,7 @@ async def command( service_name: str, project_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -804,7 +745,7 @@ async def command( """Execute a command on a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. This method executes a command on a running task. + (classic) instance. This method executes a command on a running task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -815,11 +756,10 @@ async def command( :param task_name: Name of the Task. Required. :type task_name: str :param parameters: Command to execute. Required. 
- :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: CommandProperties or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.CommandProperties :raises ~azure.core.exceptions.HttpResponseError: @@ -832,13 +772,13 @@ async def command( service_name: str, project_name: str, task_name: str, - parameters: Union[_models.CommandProperties, IO], + parameters: Union[_models.CommandProperties, IO[bytes]], **kwargs: Any ) -> _models.CommandProperties: """Execute a command on a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. This method executes a command on a running task. + (classic) instance. This method executes a command on a running task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -848,17 +788,14 @@ async def command( :type project_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Command to execute. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.CommandProperties or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Command to execute. Is either a CommandProperties type or a IO[bytes] type. + Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.CommandProperties or IO[bytes] :return: CommandProperties or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.CommandProperties :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -869,21 +806,19 @@ async def command( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.CommandProperties] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "CommandProperties") - request = build_command_request( + _request = build_command_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -893,15 +828,14 @@ async def command( content_type=content_type, json=_json, content=_content, - template_url=self.command.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -911,13 +845,9 @@ async def command( error 
= self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("CommandProperties", pipeline_response) + deserialized = self._deserialize("CommandProperties", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - command.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/command" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_usages_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_usages_operations.py index d652bf441d62..89c0e31abff2 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_usages_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/operations/_usages_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -20,20 +20,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._usages_operations import build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -61,12 +59,11 @@ def __init__(self, *args, **kwargs) -> None: def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.Quota"]: """Get resource quotas and usage information. - This method returns region-specific quotas and resource usage information for the Database - Migration Service. + This method returns region-specific quotas and resource usage information for the Azure + Database Migration Service (classic). :param location: The Azure region of the operation. Required. 
:type location: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Quota or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.Quota] :raises ~azure.core.exceptions.HttpResponseError: @@ -74,12 +71,10 @@ def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.Quota"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.QuotaList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -90,16 +85,14 @@ def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.Quota"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( location=location, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -111,13 +104,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + 
_request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("QuotaList", pipeline_response) @@ -127,10 +119,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -142,7 +135,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/usages" - } diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/__init__.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/__init__.py index 3a7960413618..45619266cebc 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/__init__.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/__init__.py @@ -75,8 +75,14 @@ from ._models_py3 import DatabaseFileInput from ._models_py3 import DatabaseInfo from ._models_py3 import DatabaseMigration +from ._models_py3 import DatabaseMigrationBase +from ._models_py3 import DatabaseMigrationBaseListResult +from ._models_py3 import DatabaseMigrationBaseProperties +from ._models_py3 import DatabaseMigrationCosmosDbMongo +from ._models_py3 import DatabaseMigrationCosmosDbMongoListResult from ._models_py3 import DatabaseMigrationListResult from ._models_py3 import DatabaseMigrationProperties +from ._models_py3 import DatabaseMigrationPropertiesCosmosDbMongo from ._models_py3 import 
DatabaseMigrationPropertiesSqlDb from ._models_py3 import DatabaseMigrationPropertiesSqlMi from ._models_py3 import DatabaseMigrationPropertiesSqlVm @@ -87,7 +93,10 @@ from ._models_py3 import DatabaseSummaryResult from ._models_py3 import DatabaseTable from ._models_py3 import DeleteNode +from ._models_py3 import ErrorAdditionalInfo +from ._models_py3 import ErrorDetail from ._models_py3 import ErrorInfo +from ._models_py3 import ErrorResponse from ._models_py3 import ExecutionStatistics from ._models_py3 import FileList from ._models_py3 import FileShare @@ -210,12 +219,16 @@ from ._models_py3 import MigrationEligibilityInfo from ._models_py3 import MigrationOperationInput from ._models_py3 import MigrationReportResult +from ._models_py3 import MigrationService +from ._models_py3 import MigrationServiceListResult +from ._models_py3 import MigrationServiceUpdate from ._models_py3 import MigrationStatusDetails from ._models_py3 import MigrationTableMetadata from ._models_py3 import MigrationValidationDatabaseLevelResult from ._models_py3 import MigrationValidationDatabaseSummaryResult from ._models_py3 import MigrationValidationOptions from ._models_py3 import MigrationValidationResult +from ._models_py3 import MongoConnectionInformation from ._models_py3 import MongoDbCancelCommand from ._models_py3 import MongoDbClusterInfo from ._models_py3 import MongoDbCollectionInfo @@ -238,6 +251,8 @@ from ._models_py3 import MongoDbShardKeyInfo from ._models_py3 import MongoDbShardKeySetting from ._models_py3 import MongoDbThrottlingSettings +from ._models_py3 import MongoMigrationCollection +from ._models_py3 import MongoMigrationProgressDetails from ._models_py3 import MySqlConnectionInfo from ._models_py3 import NameAvailabilityRequest from ._models_py3 import NameAvailabilityResponse @@ -262,6 +277,7 @@ from ._models_py3 import ProjectTask from ._models_py3 import ProjectTaskProperties from ._models_py3 import ProxyResource +from ._models_py3 import 
ProxyResourceAutoGenerated from ._models_py3 import QueryAnalysisValidationResult from ._models_py3 import QueryExecutionResult from ._models_py3 import Quota @@ -270,6 +286,7 @@ from ._models_py3 import RegenAuthKeys from ._models_py3 import ReportableException from ._models_py3 import Resource +from ._models_py3 import ResourceAutoGenerated from ._models_py3 import ResourceSku from ._models_py3 import ResourceSkuCapabilities from ._models_py3 import ResourceSkuCapacity @@ -303,9 +320,11 @@ from ._models_py3 import StartMigrationScenarioServerRoleResult from ._models_py3 import SyncMigrationDatabaseErrorEvent from ._models_py3 import SystemData +from ._models_py3 import SystemDataAutoGenerated from ._models_py3 import TargetLocation from ._models_py3 import TaskList from ._models_py3 import TrackedResource +from ._models_py3 import TrackedResourceAutoGenerated from ._models_py3 import UploadOCIDriverTaskInput from ._models_py3 import UploadOCIDriverTaskOutput from ._models_py3 import UploadOCIDriverTaskProperties @@ -348,6 +367,7 @@ from ._data_migration_management_client_enums import MongoDbProgressResultType from ._data_migration_management_client_enums import MongoDbReplication from ._data_migration_management_client_enums import MongoDbShardKeyOrder +from ._data_migration_management_client_enums import MongoMigrationStatus from ._data_migration_management_client_enums import MySqlTargetPlatformType from ._data_migration_management_client_enums import NameCheckFailureReason from ._data_migration_management_client_enums import ObjectType @@ -355,6 +375,7 @@ from ._data_migration_management_client_enums import ProjectProvisioningState from ._data_migration_management_client_enums import ProjectSourcePlatform from ._data_migration_management_client_enums import ProjectTargetPlatform +from ._data_migration_management_client_enums import ProvisioningState from ._data_migration_management_client_enums import ReplicateMigrationState from 
._data_migration_management_client_enums import ResourceSkuCapacityScaleType from ._data_migration_management_client_enums import ResourceSkuRestrictionsReasonCode @@ -452,8 +473,14 @@ "DatabaseFileInput", "DatabaseInfo", "DatabaseMigration", + "DatabaseMigrationBase", + "DatabaseMigrationBaseListResult", + "DatabaseMigrationBaseProperties", + "DatabaseMigrationCosmosDbMongo", + "DatabaseMigrationCosmosDbMongoListResult", "DatabaseMigrationListResult", "DatabaseMigrationProperties", + "DatabaseMigrationPropertiesCosmosDbMongo", "DatabaseMigrationPropertiesSqlDb", "DatabaseMigrationPropertiesSqlMi", "DatabaseMigrationPropertiesSqlVm", @@ -464,7 +491,10 @@ "DatabaseSummaryResult", "DatabaseTable", "DeleteNode", + "ErrorAdditionalInfo", + "ErrorDetail", "ErrorInfo", + "ErrorResponse", "ExecutionStatistics", "FileList", "FileShare", @@ -587,12 +617,16 @@ "MigrationEligibilityInfo", "MigrationOperationInput", "MigrationReportResult", + "MigrationService", + "MigrationServiceListResult", + "MigrationServiceUpdate", "MigrationStatusDetails", "MigrationTableMetadata", "MigrationValidationDatabaseLevelResult", "MigrationValidationDatabaseSummaryResult", "MigrationValidationOptions", "MigrationValidationResult", + "MongoConnectionInformation", "MongoDbCancelCommand", "MongoDbClusterInfo", "MongoDbCollectionInfo", @@ -615,6 +649,8 @@ "MongoDbShardKeyInfo", "MongoDbShardKeySetting", "MongoDbThrottlingSettings", + "MongoMigrationCollection", + "MongoMigrationProgressDetails", "MySqlConnectionInfo", "NameAvailabilityRequest", "NameAvailabilityResponse", @@ -639,6 +675,7 @@ "ProjectTask", "ProjectTaskProperties", "ProxyResource", + "ProxyResourceAutoGenerated", "QueryAnalysisValidationResult", "QueryExecutionResult", "Quota", @@ -647,6 +684,7 @@ "RegenAuthKeys", "ReportableException", "Resource", + "ResourceAutoGenerated", "ResourceSku", "ResourceSkuCapabilities", "ResourceSkuCapacity", @@ -680,9 +718,11 @@ "StartMigrationScenarioServerRoleResult", 
"SyncMigrationDatabaseErrorEvent", "SystemData", + "SystemDataAutoGenerated", "TargetLocation", "TaskList", "TrackedResource", + "TrackedResourceAutoGenerated", "UploadOCIDriverTaskInput", "UploadOCIDriverTaskOutput", "UploadOCIDriverTaskProperties", @@ -724,6 +764,7 @@ "MongoDbProgressResultType", "MongoDbReplication", "MongoDbShardKeyOrder", + "MongoMigrationStatus", "MySqlTargetPlatformType", "NameCheckFailureReason", "ObjectType", @@ -731,6 +772,7 @@ "ProjectProvisioningState", "ProjectSourcePlatform", "ProjectTargetPlatform", + "ProvisioningState", "ReplicateMigrationState", "ResourceSkuCapacityScaleType", "ResourceSkuRestrictionsReasonCode", diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_data_migration_management_client_enums.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_data_migration_management_client_enums.py index 0fa404f7b186..0e9fc1502e50 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_data_migration_management_client_enums.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_data_migration_management_client_enums.py @@ -72,7 +72,7 @@ class CommandType(str, Enum, metaclass=CaseInsensitiveEnumMeta): class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """CreatedByType.""" + """The type of identity that created the resource.""" USER = "User" APPLICATION = "Application" @@ -270,6 +270,16 @@ class MongoDbShardKeyOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): HASHED = "Hashed" +class MongoMigrationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Migration Status.""" + + NOT_STARTED = "NotStarted" + IN_PROGRESS = "InProgress" + COMPLETED = "Completed" + FAILED = "Failed" + CANCELED = "Canceled" + + class MySqlTargetPlatformType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """An enumeration of possible target types when migrating from MySQL.""" @@ -329,6 +339,18 @@ class 
ProjectTargetPlatform(str, Enum, metaclass=CaseInsensitiveEnumMeta): UNKNOWN = "Unknown" +class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning State of migration. ProvisioningState as Succeeded implies that validations have + been performed and migration has started. + """ + + PROVISIONING = "Provisioning" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + + class ReplicateMigrationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Wrapper for replicate reported migration states.""" @@ -367,6 +389,7 @@ class ResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): SQL_MI = "SqlMi" SQL_VM = "SqlVm" SQL_DB = "SqlDb" + MONGO_TO_COSMOS_DB_MONGO = "MongoToCosmosDbMongo" class ScenarioSource(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -471,7 +494,7 @@ class SqlSourcePlatform(str, Enum, metaclass=CaseInsensitiveEnumMeta): class SsisMigrationOverwriteOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The overwrite option for SSIS object migration, only ignore and overwrite are supported in DMS - now and future may add Reuse option for container object. + (classic) now and future may add Reuse option for container object. 
""" IGNORE = "Ignore" @@ -488,7 +511,7 @@ class SsisMigrationStage(str, Enum, metaclass=CaseInsensitiveEnumMeta): class SsisStoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """An enumeration of supported source SSIS store type in DMS.""" + """An enumeration of supported source SSIS store type in DMS (classic).""" SSIS_CATALOG = "SsisCatalog" diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_models_py3.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_models_py3.py index 3a2b38ac41ea..91242b4b53fd 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_models_py3.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/models/_models_py3.py @@ -32,7 +32,7 @@ class ApiError(_serialization.Model): :ivar error: Error information in OData format. :vartype error: ~azure.mgmt.datamigration.models.ODataError :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated """ _validation = { @@ -41,10 +41,10 @@ class ApiError(_serialization.Model): _attribute_map = { "error": {"key": "error", "type": "ODataError"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, } - def __init__(self, *, error: Optional["_models.ODataError"] = None, **kwargs): + def __init__(self, *, error: Optional["_models.ODataError"] = None, **kwargs: Any) -> None: """ :keyword error: Error information in OData format. 
:paramtype error: ~azure.mgmt.datamigration.models.ODataError @@ -68,7 +68,7 @@ class AuthenticationKeys(_serialization.Model): "auth_key2": {"key": "authKey2", "type": "str"}, } - def __init__(self, *, auth_key1: Optional[str] = None, auth_key2: Optional[str] = None, **kwargs): + def __init__(self, *, auth_key1: Optional[str] = None, auth_key2: Optional[str] = None, **kwargs: Any) -> None: """ :keyword auth_key1: The first authentication key. :paramtype auth_key1: str @@ -103,8 +103,8 @@ def __init__( resource_type: Optional[str] = None, sku: Optional["_models.AvailableServiceSkuSku"] = None, capacity: Optional["_models.AvailableServiceSkuCapacity"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword resource_type: The resource type, including the provider namespace. :paramtype resource_type: str @@ -147,8 +147,8 @@ def __init__( maximum: Optional[int] = None, default: Optional[int] = None, scale_type: Optional[Union[str, "_models.ServiceScalability"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword minimum: The minimum capacity, usually 0 or 1. :paramtype minimum: int @@ -194,8 +194,8 @@ def __init__( family: Optional[str] = None, size: Optional[str] = None, tier: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: The name of the SKU. :paramtype name: str @@ -240,8 +240,8 @@ def __init__( app_key: Optional[str] = None, tenant_id: Optional[str] = None, ignore_azure_permissions: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword application_id: Application ID of the Azure Active Directory Application. :paramtype application_id: str @@ -282,8 +282,8 @@ def __init__( storage_account_resource_id: Optional[str] = None, account_key: Optional[str] = None, blob_container_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword storage_account_resource_id: Resource Id of the storage account where backups are stored. 
@@ -318,8 +318,8 @@ def __init__( *, source_location: Optional["_models.SourceLocation"] = None, target_location: Optional["_models.TargetLocation"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_location: Source location of backups. :paramtype source_location: ~azure.mgmt.datamigration.models.SourceLocation @@ -355,8 +355,8 @@ def __init__( file_location: Optional[str] = None, family_sequence_number: Optional[int] = None, status: Optional[Union[str, "_models.BackupFileStatus"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword file_location: Location of the backup file in shared folder. :paramtype file_location: str @@ -425,8 +425,8 @@ def __init__( backup_start_date: Optional[datetime.datetime] = None, backup_finished_date: Optional[datetime.datetime] = None, is_backup_restored: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword backup_set_id: Id for the set of backup files. :paramtype backup_set_id: str @@ -475,7 +475,7 @@ class BlobShare(_serialization.Model): "sas_uri": {"key": "sasUri", "type": "str"}, } - def __init__(self, *, sas_uri: Optional[str] = None, **kwargs): + def __init__(self, *, sas_uri: Optional[str] = None, **kwargs: Any) -> None: """ :keyword sas_uri: SAS URI of Azure Storage Account Container. :paramtype sas_uri: str @@ -495,7 +495,7 @@ class CheckOCIDriverTaskInput(_serialization.Model): "server_version": {"key": "serverVersion", "type": "str"}, } - def __init__(self, *, server_version: Optional[str] = None, **kwargs): + def __init__(self, *, server_version: Optional[str] = None, **kwargs: Any) -> None: """ :keyword server_version: Version of the source server to check against. Optional. 
:paramtype server_version: str @@ -524,7 +524,7 @@ class CheckOCIDriverTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, *, installed_driver: Optional["_models.OracleOCIDriverInfo"] = None, **kwargs): + def __init__(self, *, installed_driver: Optional["_models.OracleOCIDriverInfo"] = None, **kwargs: Any) -> None: """ :keyword installed_driver: Information about the installed driver if found and valid. :paramtype installed_driver: ~azure.mgmt.datamigration.models.OracleOCIDriverInfo @@ -535,7 +535,8 @@ def __init__(self, *, installed_driver: Optional["_models.OracleOCIDriverInfo"] class ProjectTaskProperties(_serialization.Model): - """Base class for all types of DMS task properties. If task is not supported by current client, this object is returned. + """Base class for all types of DMS (classic) task properties. If task is not supported by current + client, this object is returned. You probably want to use the sub-classes and not this class directly. Known sub-classes are: ConnectToMongoDbTaskProperties, ConnectToSourceMySqlTaskProperties, @@ -564,7 +565,7 @@ class ProjectTaskProperties(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -654,7 +655,7 @@ class ProjectTaskProperties(_serialization.Model): } } - def __init__(self, *, client_data: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, client_data: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. 
:paramtype client_data: dict[str, str] @@ -672,7 +673,7 @@ class CheckOCIDriverTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -731,8 +732,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.CheckOCIDriverTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -746,7 +747,8 @@ def __init__( class CommandProperties(_serialization.Model): - """Base class for all types of DMS command properties. If command is not supported by current client, this object is returned. + """Base class for all types of DMS (classic) command properties. If command is not supported by + current client, this object is returned. You probably want to use the sub-classes and not this class directly. Known sub-classes are: MigrateMISyncCompleteCommandProperties, MigrateSyncCompleteCommandProperties, @@ -754,7 +756,7 @@ class CommandProperties(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database", "Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart". 
@@ -788,7 +790,7 @@ class CommandProperties(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.command_type: Optional[str] = None @@ -803,7 +805,7 @@ class ConnectionInfo(_serialization.Model): MiSqlConnectionInfo, MongoDbConnectionInfo, MySqlConnectionInfo, OracleConnectionInfo, PostgreSqlConnectionInfo, SqlConnectionInfo - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Type of connection info. Required. :vartype type: str @@ -834,7 +836,7 @@ class ConnectionInfo(_serialization.Model): } } - def __init__(self, *, user_name: Optional[str] = None, password: Optional[str] = None, **kwargs): + def __init__(self, *, user_name: Optional[str] = None, password: Optional[str] = None, **kwargs: Any) -> None: """ :keyword user_name: User name. :paramtype user_name: str @@ -848,11 +850,12 @@ def __init__(self, *, user_name: Optional[str] = None, password: Optional[str] = class ConnectToMongoDbTaskProperties(ProjectTaskProperties): - """Properties for the task that validates the connection to and provides information about a MongoDB server. + """Properties for the task that validates the connection to and provides information about a + MongoDB server. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -911,8 +914,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MongoDbConnectionInfo"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -928,7 +931,7 @@ def __init__( class ConnectToSourceMySqlTaskInput(_serialization.Model): """Input for the task that validates MySQL database connection. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to MySQL source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -962,8 +965,8 @@ def __init__( target_platform: Optional[Union[str, "_models.MySqlTargetPlatformType"]] = None, check_permissions_group: Optional[Union[str, "_models.ServerLevelPermissionsGroup"]] = None, is_offline_migration: bool = False, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to MySQL source. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -990,7 +993,7 @@ class ConnectToSourceMySqlTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -1049,8 +1052,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToSourceMySqlTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -1096,7 +1099,7 @@ class ConnectToSourceNonSqlTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -1109,7 +1112,7 @@ def __init__(self, **kwargs): class ConnectToSourceOracleSyncTaskInput(_serialization.Model): """Input for the task that validates Oracle database connection. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to Oracle source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.OracleConnectionInfo @@ -1123,7 +1126,7 @@ class ConnectToSourceOracleSyncTaskInput(_serialization.Model): "source_connection_info": {"key": "sourceConnectionInfo", "type": "OracleConnectionInfo"}, } - def __init__(self, *, source_connection_info: "_models.OracleConnectionInfo", **kwargs): + def __init__(self, *, source_connection_info: "_models.OracleConnectionInfo", **kwargs: Any) -> None: """ :keyword source_connection_info: Information for connecting to Oracle source. Required. 
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.OracleConnectionInfo @@ -1161,7 +1164,7 @@ class ConnectToSourceOracleSyncTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.source_server_version = None @@ -1175,7 +1178,7 @@ class ConnectToSourceOracleSyncTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -1234,8 +1237,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToSourceOracleSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -1251,7 +1254,7 @@ def __init__( class ConnectToSourcePostgreSqlSyncTaskInput(_serialization.Model): """Input for the task that validates connection to PostgreSQL and source server requirements. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Connection information for source PostgreSQL server. Required. 
:vartype source_connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo @@ -1265,7 +1268,7 @@ class ConnectToSourcePostgreSqlSyncTaskInput(_serialization.Model): "source_connection_info": {"key": "sourceConnectionInfo", "type": "PostgreSqlConnectionInfo"}, } - def __init__(self, *, source_connection_info: "_models.PostgreSqlConnectionInfo", **kwargs): + def __init__(self, *, source_connection_info: "_models.PostgreSqlConnectionInfo", **kwargs: Any) -> None: """ :keyword source_connection_info: Connection information for source PostgreSQL server. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo @@ -1307,7 +1310,7 @@ class ConnectToSourcePostgreSqlSyncTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -1317,12 +1320,13 @@ def __init__(self, **kwargs): self.validation_errors = None -class ConnectToSourcePostgreSqlSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that validates connection to PostgreSQL server and source server requirements for online migration. +class ConnectToSourcePostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that validates connection to PostgreSQL server and source server + requirements for online migration. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -1381,8 +1385,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToSourcePostgreSqlSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -1395,12 +1399,13 @@ def __init__( self.output = None -class ConnectToSourceSqlServerSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that validates connection to SQL Server and source server requirements for online migration. +class ConnectToSourceSqlServerSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that validates connection to SQL Server and source server requirements + for online migration. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -1459,8 +1464,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToSourceSqlServerTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -1474,9 +1479,10 @@ def __init__( class ConnectToSourceSqlServerTaskInput(_serialization.Model): - """Input for the task that validates connection to SQL Server and also validates source server requirements. + """Input for the task that validates connection to SQL Server and also validates source server + requirements. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Connection information for Source SQL Server. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -1527,8 +1533,8 @@ def __init__( collect_tde_certificate_info: bool = False, validate_ssis_catalog_only: bool = False, encrypted_key_for_secure_fields: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Connection information for Source SQL Server. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -1564,7 +1570,8 @@ def __init__( class ConnectToSourceSqlServerTaskOutput(_serialization.Model): - """Output for the task that validates connection to SQL Server and also validates source server requirements. + """Output for the task that validates connection to SQL Server and also validates source server + requirements. You probably want to use the sub-classes and not this class directly. Known sub-classes are: ConnectToSourceSqlServerTaskOutputAgentJobLevel, @@ -1573,7 +1580,7 @@ class ConnectToSourceSqlServerTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -1600,19 +1607,22 @@ class ConnectToSourceSqlServerTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None self.result_type: Optional[str] = None -class ConnectToSourceSqlServerTaskOutputAgentJobLevel(ConnectToSourceSqlServerTaskOutput): - """Agent Job level output for the task that validates connection to SQL Server and also validates source server requirements. +class ConnectToSourceSqlServerTaskOutputAgentJobLevel( + ConnectToSourceSqlServerTaskOutput +): # pylint: disable=name-too-long + """Agent Job level output for the task that validates connection to SQL Server and also validates + source server requirements. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -1658,7 +1668,7 @@ class ConnectToSourceSqlServerTaskOutputAgentJobLevel(ConnectToSourceSqlServerTa "migration_eligibility": {"key": "migrationEligibility", "type": "MigrationEligibilityInfo"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "AgentJobLevelOutput" @@ -1671,12 +1681,15 @@ def __init__(self, **kwargs): self.migration_eligibility = None -class ConnectToSourceSqlServerTaskOutputDatabaseLevel(ConnectToSourceSqlServerTaskOutput): - """Database level output for the task that validates connection to SQL Server and also validates source server requirements. +class ConnectToSourceSqlServerTaskOutputDatabaseLevel( + ConnectToSourceSqlServerTaskOutput +): # pylint: disable=name-too-long + """Database level output for the task that validates connection to SQL Server and also validates + source server requirements. 
Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -1718,7 +1731,7 @@ class ConnectToSourceSqlServerTaskOutputDatabaseLevel(ConnectToSourceSqlServerTa "database_state": {"key": "databaseState", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -1729,12 +1742,13 @@ def __init__(self, **kwargs): self.database_state = None -class ConnectToSourceSqlServerTaskOutputLoginLevel(ConnectToSourceSqlServerTaskOutput): - """Login level output for the task that validates connection to SQL Server and also validates source server requirements. +class ConnectToSourceSqlServerTaskOutputLoginLevel(ConnectToSourceSqlServerTaskOutput): # pylint: disable=name-too-long + """Login level output for the task that validates connection to SQL Server and also validates + source server requirements. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -1773,7 +1787,7 @@ class ConnectToSourceSqlServerTaskOutputLoginLevel(ConnectToSourceSqlServerTaskO "migration_eligibility": {"key": "migrationEligibility", "type": "MigrationEligibilityInfo"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "LoginLevelOutput" @@ -1784,12 +1798,13 @@ def __init__(self, **kwargs): self.migration_eligibility = None -class ConnectToSourceSqlServerTaskOutputTaskLevel(ConnectToSourceSqlServerTaskOutput): - """Task level output for the task that validates connection to SQL Server and also validates source server requirements. +class ConnectToSourceSqlServerTaskOutputTaskLevel(ConnectToSourceSqlServerTaskOutput): # pylint: disable=name-too-long + """Task level output for the task that validates connection to SQL Server and also validates + source server requirements. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -1836,7 +1851,7 @@ class ConnectToSourceSqlServerTaskOutputTaskLevel(ConnectToSourceSqlServerTaskOu "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "TaskLevelOutput" @@ -1850,11 +1865,12 @@ def __init__(self, **kwargs): class ConnectToSourceSqlServerTaskProperties(ProjectTaskProperties): - """Properties for the task that validates connection to SQL Server and also validates source server requirements. + """Properties for the task that validates connection to SQL Server and also validates source + server requirements. Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -1917,8 +1933,8 @@ def __init__( client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToSourceSqlServerTaskInput"] = None, task_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -1935,9 +1951,10 @@ def __init__( class ConnectToTargetAzureDbForMySqlTaskInput(_serialization.Model): - """Input for the task that validates connection to Azure Database for MySQL and target server requirements. + """Input for the task that validates connection to Azure Database for MySQL and target server + requirements. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Connection information for source MySQL server. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -1965,8 +1982,8 @@ def __init__( source_connection_info: "_models.MySqlConnectionInfo", target_connection_info: "_models.MySqlConnectionInfo", is_offline_migration: bool = False, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Connection information for source MySQL server. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -1983,7 +2000,8 @@ def __init__( class ConnectToTargetAzureDbForMySqlTaskOutput(_serialization.Model): - """Output for the task that validates connection to Azure Database for MySQL and target server requirements. 
+ """Output for the task that validates connection to Azure Database for MySQL and target server + requirements. Variables are only populated by the server, and will be ignored when sending a request. @@ -2015,7 +2033,7 @@ class ConnectToTargetAzureDbForMySqlTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -2025,12 +2043,13 @@ def __init__(self, **kwargs): self.validation_errors = None -class ConnectToTargetAzureDbForMySqlTaskProperties(ProjectTaskProperties): - """Properties for the task that validates connection to Azure Database for MySQL and target server requirements. +class ConnectToTargetAzureDbForMySqlTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that validates connection to Azure Database for MySQL and target server + requirements. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -2090,8 +2109,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToTargetAzureDbForMySqlTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -2104,10 +2123,11 @@ def __init__( self.output = None -class ConnectToTargetAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): - """Input for the task that validates connection to Azure Database for PostgreSQL and target server requirements. 
+class ConnectToTargetAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): # pylint: disable=name-too-long + """Input for the task that validates connection to Azure Database for PostgreSQL and target server + requirements. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Connection information for source PostgreSQL server. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo @@ -2131,8 +2151,8 @@ def __init__( *, source_connection_info: "_models.PostgreSqlConnectionInfo", target_connection_info: "_models.PostgreSqlConnectionInfo", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Connection information for source PostgreSQL server. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo @@ -2145,8 +2165,9 @@ def __init__( self.target_connection_info = target_connection_info -class ConnectToTargetAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): - """Output for the task that validates connection to Azure Database for PostgreSQL and target server requirements. +class ConnectToTargetAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for the task that validates connection to Azure Database for PostgreSQL and target + server requirements. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -2178,7 +2199,7 @@ class ConnectToTargetAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -2188,12 +2209,13 @@ def __init__(self, **kwargs): self.validation_errors = None -class ConnectToTargetAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that validates connection to Azure Database For PostgreSQL server and target server requirements for online migration. +class ConnectToTargetAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that validates connection to Azure Database For PostgreSQL server and + target server requirements for online migration. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -2254,8 +2276,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToTargetAzureDbForPostgreSqlSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -2269,10 +2291,11 @@ def __init__( self.output = None -class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): - """Input for the task that validates connection to Azure Database for PostgreSQL and target server requirements for Oracle source. 
+class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): # pylint: disable=name-too-long + """Input for the task that validates connection to Azure Database for PostgreSQL and target server + requirements for Oracle source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar target_connection_info: Connection information for target Azure Database for PostgreSQL server. Required. @@ -2287,7 +2310,7 @@ class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput(_serialization.Mode "target_connection_info": {"key": "targetConnectionInfo", "type": "PostgreSqlConnectionInfo"}, } - def __init__(self, *, target_connection_info: "_models.PostgreSqlConnectionInfo", **kwargs): + def __init__(self, *, target_connection_info: "_models.PostgreSqlConnectionInfo", **kwargs: Any) -> None: """ :keyword target_connection_info: Connection information for target Azure Database for PostgreSQL server. Required. @@ -2297,8 +2320,9 @@ def __init__(self, *, target_connection_info: "_models.PostgreSqlConnectionInfo" self.target_connection_info = target_connection_info -class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): - """Output for the task that validates connection to Azure Database for PostgreSQL and target server requirements for Oracle source. +class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for the task that validates connection to Azure Database for PostgreSQL and target + server requirements for Oracle source. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -2339,12 +2363,12 @@ def __init__( database_schema_map: Optional[ List["_models.ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem"] ] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword database_schema_map: Mapping of schemas per database. :paramtype database_schema_map: - list[~azure.mgmt.datamigration.models.ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem] + list[~azure.mgmt.datamigration.models.ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem] # pylint: disable=line-too-long """ super().__init__(**kwargs) self.target_server_version = None @@ -2354,7 +2378,9 @@ def __init__( self.database_schema_map = database_schema_map -class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem(_serialization.Model): +class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem( + _serialization.Model +): # pylint: disable=name-too-long """ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapItem. :ivar database: @@ -2368,7 +2394,7 @@ class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskOutputDatabaseSchemaMapIt "schemas": {"key": "schemas", "type": "[str]"}, } - def __init__(self, *, database: Optional[str] = None, schemas: Optional[List[str]] = None, **kwargs): + def __init__(self, *, database: Optional[str] = None, schemas: Optional[List[str]] = None, **kwargs: Any) -> None: """ :keyword database: :paramtype database: str @@ -2380,12 +2406,15 @@ def __init__(self, *, database: Optional[str] = None, schemas: Optional[List[str self.schemas = schemas -class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that validates connection to Azure Database For PostgreSQL server and target server requirements for online migration for Oracle source. 
+class ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskProperties( + ProjectTaskProperties +): # pylint: disable=name-too-long + """Properties for the task that validates connection to Azure Database For PostgreSQL server and + target server requirements for online migration for Oracle source. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -2446,8 +2475,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToTargetOracleAzureDbForPostgreSqlSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -2464,7 +2493,7 @@ def __init__( class ConnectToTargetSqlDbSyncTaskInput(_serialization.Model): """Input for the task that validates connection to Azure SQL DB and target server requirements. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Connection information for source SQL Server. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -2487,8 +2516,8 @@ def __init__( *, source_connection_info: "_models.SqlConnectionInfo", target_connection_info: "_models.SqlConnectionInfo", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Connection information for source SQL Server. Required. 
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -2501,11 +2530,12 @@ def __init__( class ConnectToTargetSqlDbSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that validates connection to SQL DB and target server requirements for online migration. + """Properties for the task that validates connection to SQL DB and target server requirements for + online migration. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -2564,8 +2594,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToTargetSqlDbSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -2581,7 +2611,7 @@ def __init__( class ConnectToTargetSqlDbTaskInput(_serialization.Model): """Input for the task that validates connection to SQL DB and target server requirements. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar target_connection_info: Connection information for target SQL DB. Required. :vartype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -2604,8 +2634,8 @@ def __init__( *, target_connection_info: "_models.SqlConnectionInfo", query_object_counts: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword target_connection_info: Connection information for target SQL DB. Required. 
:paramtype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -2647,7 +2677,7 @@ class ConnectToTargetSqlDbTaskOutput(_serialization.Model): "target_server_brand_version": {"key": "targetServerBrandVersion", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -2661,7 +2691,7 @@ class ConnectToTargetSqlDbTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -2724,8 +2754,8 @@ def __init__( client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToTargetSqlDbTaskInput"] = None, created_on: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -2742,15 +2772,17 @@ def __init__( class ConnectToTargetSqlMISyncTaskInput(_serialization.Model): - """Input for the task that validates connection to Azure SQL Database Managed Instance online scenario. + """Input for the task that validates connection to Azure SQL Database Managed Instance online + scenario. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar target_connection_info: Connection information for Azure SQL Database Managed Instance. Required. 
:vartype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo - :ivar azure_app: Azure Active Directory Application the DMS instance will use to connect to the - target instance of Azure SQL Database Managed Instance and the Azure Storage Account. Required. + :ivar azure_app: Azure Active Directory Application the DMS (classic) instance will use to + connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage + Account. Required. :vartype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp """ @@ -2769,15 +2801,15 @@ def __init__( *, target_connection_info: "_models.MiSqlConnectionInfo", azure_app: "_models.AzureActiveDirectoryApp", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword target_connection_info: Connection information for Azure SQL Database Managed Instance. Required. :paramtype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo - :keyword azure_app: Azure Active Directory Application the DMS instance will use to connect to - the target instance of Azure SQL Database Managed Instance and the Azure Storage Account. - Required. + :keyword azure_app: Azure Active Directory Application the DMS (classic) instance will use to + connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage + Account. Required. :paramtype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp """ super().__init__(**kwargs) @@ -2810,7 +2842,7 @@ class ConnectToTargetSqlMISyncTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.target_server_version = None @@ -2823,7 +2855,7 @@ class ConnectToTargetSqlMISyncTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -2882,8 +2914,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToTargetSqlMISyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -2899,7 +2931,7 @@ def __init__( class ConnectToTargetSqlMITaskInput(_serialization.Model): """Input for the task that validates connection to Azure SQL Database Managed Instance. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar target_connection_info: Connection information for target SQL Server. Required. :vartype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -2930,8 +2962,8 @@ def __init__( collect_logins: bool = True, collect_agent_jobs: bool = True, validate_ssis_catalog_only: bool = False, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword target_connection_info: Connection information for target SQL Server. Required. :paramtype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -2987,7 +3019,7 @@ class ConnectToTargetSqlMITaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -3003,7 +3035,7 @@ class ConnectToTargetSqlMITaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -3062,8 +3094,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ConnectToTargetSqlMITaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -3133,7 +3165,7 @@ class CopyProgressDetails(_serialization.Model): # pylint: disable=too-many-ins "copy_duration": {"key": "copyDuration", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.table_name = None @@ -3233,8 +3265,8 @@ def __init__( server_visible_online_core_count: Optional[int] = None, database_state: Optional[Union[str, "_models.DatabaseState"]] = None, server_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword id: Unique identifier for the database. :paramtype id: str @@ -3343,7 +3375,7 @@ class DatabaseBackupInfo(_serialization.Model): "backup_finish_date": {"key": "backupFinishDate", "type": "iso-8601"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.database_name = None @@ -3396,8 +3428,8 @@ def __init__( restore_full_name: Optional[str] = None, file_type: Optional[Union[str, "_models.DatabaseFileType"]] = None, size_mb: Optional[float] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword database_name: Name of the database. 
:paramtype database_name: str @@ -3457,8 +3489,8 @@ def __init__( physical_full_name: Optional[str] = None, restore_full_name: Optional[str] = None, file_type: Optional[Union[str, "_models.DatabaseFileType"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword id: Unique identifier for database file. :paramtype id: str @@ -3483,7 +3515,7 @@ def __init__( class DatabaseInfo(_serialization.Model): """Project Database Details. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_database_name: Name of the database. Required. :vartype source_database_name: str @@ -3497,7 +3529,7 @@ class DatabaseInfo(_serialization.Model): "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, } - def __init__(self, *, source_database_name: str, **kwargs): + def __init__(self, *, source_database_name: str, **kwargs: Any) -> None: """ :keyword source_database_name: Name of the database. Required. :paramtype source_database_name: str @@ -3506,37 +3538,65 @@ def __init__(self, *, source_database_name: str, **kwargs): self.source_database_name = source_database_name -class ProxyResource(_serialization.Model): - """ProxyResource. +class Resource(_serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long :vartype id: str - :ivar name: + :ivar name: The name of the resource. :vartype name: str - :ivar type: + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
:vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.datamigration.models.SystemData """ _validation = { "id": {"readonly": True}, "name": {"readonly": True}, "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None self.name = None self.type = None + self.system_data = None + + +class ProxyResource(Resource): + """The resource model definition for a Azure Resource Manager proxy resource. It will not have + tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + """ class DatabaseMigration(ProxyResource): @@ -3544,13 +3604,16 @@ class DatabaseMigration(ProxyResource): Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: + :ivar id: Fully qualified resource ID for the resource. E.g. 
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long :vartype id: str - :ivar name: + :ivar name: The name of the resource. :vartype name: str - :ivar type: + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. :vartype system_data: ~azure.mgmt.datamigration.models.SystemData :ivar properties: Database Migration Resource properties. :vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationProperties @@ -3571,23 +3634,68 @@ class DatabaseMigration(ProxyResource): "properties": {"key": "properties", "type": "DatabaseMigrationProperties"}, } - def __init__(self, *, properties: Optional["_models.DatabaseMigrationProperties"] = None, **kwargs): + def __init__(self, *, properties: Optional["_models.DatabaseMigrationProperties"] = None, **kwargs: Any) -> None: """ :keyword properties: Database Migration Resource properties. :paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationProperties """ super().__init__(**kwargs) - self.system_data = None self.properties = properties -class DatabaseMigrationListResult(_serialization.Model): +class DatabaseMigrationBase(ProxyResource): + """Database Migration Resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long + :vartype id: str + :ivar name: The name of the resource. 
+ :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :ivar properties: Database Migration Base Resource properties. + :vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationBaseProperties + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DatabaseMigrationBaseProperties"}, + } + + def __init__( + self, *, properties: Optional["_models.DatabaseMigrationBaseProperties"] = None, **kwargs: Any + ) -> None: + """ + :keyword properties: Database Migration Base Resource properties. + :paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationBaseProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class DatabaseMigrationBaseListResult(_serialization.Model): """A list of Database Migrations. Variables are only populated by the server, and will be ignored when sending a request. 
:ivar value: - :vartype value: list[~azure.mgmt.datamigration.models.DatabaseMigration] + :vartype value: list[~azure.mgmt.datamigration.models.DatabaseMigrationBase] :ivar next_link: :vartype next_link: str """ @@ -3598,55 +3706,47 @@ class DatabaseMigrationListResult(_serialization.Model): } _attribute_map = { - "value": {"key": "value", "type": "[DatabaseMigration]"}, + "value": {"key": "value", "type": "[DatabaseMigrationBase]"}, "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None self.next_link = None -class DatabaseMigrationProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes - """Database Migration Resource properties. +class DatabaseMigrationBaseProperties(_serialization.Model): + """Database Migration Base Resource properties. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - DatabaseMigrationPropertiesSqlDb, DatabaseMigrationPropertiesSqlMi, - DatabaseMigrationPropertiesSqlVm + DatabaseMigrationProperties, DatabaseMigrationPropertiesCosmosDbMongo Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :ivar kind: Required. Known values are: "SqlMi", "SqlVm", and "SqlDb". + :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo". :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType - :ivar scope: Resource Id of the target resource (SQL VM or SQL Managed Instance). + :ivar scope: Resource Id of the target resource. :vartype scope: str :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded - implies that validations have been performed and migration has started. 
- :vartype provisioning_state: str + implies that validations have been performed and migration has started. Known values are: + "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState :ivar migration_status: Migration status. :vartype migration_status: str :ivar started_on: Database migration start time. :vartype started_on: ~datetime.datetime :ivar ended_on: Database migration end time. :vartype ended_on: ~datetime.datetime - :ivar source_sql_connection: Source SQL Server connection details. - :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation - :ivar source_database_name: Name of the source database. - :vartype source_database_name: str - :ivar source_server_name: Name of the source sql server. - :vartype source_server_name: str :ivar migration_service: Resource Id of the Migration Service. :vartype migration_service: str - :ivar migration_operation_id: ID tracking current migration operation. + :ivar migration_operation_id: ID for current migration operation. :vartype migration_operation_id: str :ivar migration_failure_error: Error details in case of migration failure. :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo - :ivar target_database_collation: Database collation to be used for the target database. - :vartype target_database_collation: str :ivar provisioning_error: Error message for migration provisioning failure, if any. 
:vartype provisioning_error: str """ @@ -3657,7 +3757,6 @@ class DatabaseMigrationProperties(_serialization.Model): # pylint: disable=too- "migration_status": {"readonly": True}, "started_on": {"readonly": True}, "ended_on": {"readonly": True}, - "source_server_name": {"readonly": True}, "migration_failure_error": {"readonly": True}, } @@ -3668,21 +3767,16 @@ class DatabaseMigrationProperties(_serialization.Model): # pylint: disable=too- "migration_status": {"key": "migrationStatus", "type": "str"}, "started_on": {"key": "startedOn", "type": "iso-8601"}, "ended_on": {"key": "endedOn", "type": "iso-8601"}, - "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"}, - "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, - "source_server_name": {"key": "sourceServerName", "type": "str"}, "migration_service": {"key": "migrationService", "type": "str"}, "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"}, - "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"}, "provisioning_error": {"key": "provisioningError", "type": "str"}, } _subtype_map = { "kind": { - "SqlDb": "DatabaseMigrationPropertiesSqlDb", - "SqlMi": "DatabaseMigrationPropertiesSqlMi", - "SqlVm": "DatabaseMigrationPropertiesSqlVm", + "DatabaseMigrationProperties": "DatabaseMigrationProperties", + "MongoToCosmosDbMongo": "DatabaseMigrationPropertiesCosmosDbMongo", } } @@ -3690,27 +3784,18 @@ def __init__( self, *, scope: Optional[str] = None, - source_sql_connection: Optional["_models.SqlConnectionInformation"] = None, - source_database_name: Optional[str] = None, migration_service: Optional[str] = None, migration_operation_id: Optional[str] = None, - target_database_collation: Optional[str] = None, provisioning_error: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword scope: Resource Id of the target 
resource (SQL VM or SQL Managed Instance). + :keyword scope: Resource Id of the target resource. :paramtype scope: str - :keyword source_sql_connection: Source SQL Server connection details. - :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation - :keyword source_database_name: Name of the source database. - :paramtype source_database_name: str :keyword migration_service: Resource Id of the Migration Service. :paramtype migration_service: str - :keyword migration_operation_id: ID tracking current migration operation. + :keyword migration_operation_id: ID for current migration operation. :paramtype migration_operation_id: str - :keyword target_database_collation: Database collation to be used for the target database. - :paramtype target_database_collation: str :keyword provisioning_error: Error message for migration provisioning failure, if any. :paramtype provisioning_error: str """ @@ -3721,182 +3806,583 @@ def __init__( self.migration_status = None self.started_on = None self.ended_on = None - self.source_sql_connection = source_sql_connection - self.source_database_name = source_database_name - self.source_server_name = None self.migration_service = migration_service self.migration_operation_id = migration_operation_id self.migration_failure_error = None - self.target_database_collation = target_database_collation self.provisioning_error = provisioning_error -class DatabaseMigrationPropertiesSqlDb(DatabaseMigrationProperties): # pylint: disable=too-many-instance-attributes - """Database Migration Resource properties for SQL database. +class DatabaseMigrationCosmosDbMongo(ProxyResource): # pylint: disable=too-many-instance-attributes + """Database Migration Resource for Mongo to CosmosDb. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar kind: Required. Known values are: "SqlMi", "SqlVm", and "SqlDb". 
+ :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :ivar kind: Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo". :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType - :ivar scope: Resource Id of the target resource (SQL VM or SQL Managed Instance). + :ivar scope: Resource Id of the target resource. :vartype scope: str :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded - implies that validations have been performed and migration has started. - :vartype provisioning_state: str + implies that validations have been performed and migration has started. Known values are: + "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState :ivar migration_status: Migration status. :vartype migration_status: str :ivar started_on: Database migration start time. :vartype started_on: ~datetime.datetime :ivar ended_on: Database migration end time. :vartype ended_on: ~datetime.datetime - :ivar source_sql_connection: Source SQL Server connection details. - :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation - :ivar source_database_name: Name of the source database. - :vartype source_database_name: str - :ivar source_server_name: Name of the source sql server. 
- :vartype source_server_name: str :ivar migration_service: Resource Id of the Migration Service. :vartype migration_service: str - :ivar migration_operation_id: ID tracking current migration operation. + :ivar migration_operation_id: ID for current migration operation. :vartype migration_operation_id: str :ivar migration_failure_error: Error details in case of migration failure. :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo - :ivar target_database_collation: Database collation to be used for the target database. - :vartype target_database_collation: str :ivar provisioning_error: Error message for migration provisioning failure, if any. :vartype provisioning_error: str - :ivar migration_status_details: Detailed migration status. Not included by default. - :vartype migration_status_details: ~azure.mgmt.datamigration.models.SqlDbMigrationStatusDetails - :ivar target_sql_connection: Target SQL DB connection details. - :vartype target_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation - :ivar offline_configuration: Offline configuration. - :vartype offline_configuration: ~azure.mgmt.datamigration.models.SqlDbOfflineConfiguration - :ivar table_list: List of tables to copy. - :vartype table_list: list[str] + :ivar source_mongo_connection: Source Mongo connection details. + :vartype source_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation + :ivar target_mongo_connection: Target Cosmos DB Mongo connection details. + :vartype target_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation + :ivar collection_list: List of Mongo Collections to be migrated. 
+ :vartype collection_list: list[~azure.mgmt.datamigration.models.MongoMigrationCollection] """ _validation = { - "kind": {"required": True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, "provisioning_state": {"readonly": True}, "migration_status": {"readonly": True}, "started_on": {"readonly": True}, "ended_on": {"readonly": True}, - "source_server_name": {"readonly": True}, "migration_failure_error": {"readonly": True}, - "migration_status_details": {"readonly": True}, - "offline_configuration": {"readonly": True}, } _attribute_map = { - "kind": {"key": "kind", "type": "str"}, - "scope": {"key": "scope", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "migration_status": {"key": "migrationStatus", "type": "str"}, - "started_on": {"key": "startedOn", "type": "iso-8601"}, - "ended_on": {"key": "endedOn", "type": "iso-8601"}, - "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"}, - "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, - "source_server_name": {"key": "sourceServerName", "type": "str"}, - "migration_service": {"key": "migrationService", "type": "str"}, - "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, - "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"}, - "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"}, - "provisioning_error": {"key": "provisioningError", "type": "str"}, - "migration_status_details": {"key": "migrationStatusDetails", "type": "SqlDbMigrationStatusDetails"}, - "target_sql_connection": {"key": "targetSqlConnection", "type": "SqlConnectionInformation"}, - "offline_configuration": {"key": "offlineConfiguration", "type": "SqlDbOfflineConfiguration"}, - "table_list": {"key": "tableList", "type": "[str]"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, 
+ "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "kind": {"key": "properties.kind", "type": "str"}, + "scope": {"key": "properties.scope", "type": "str"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "migration_status": {"key": "properties.migrationStatus", "type": "str"}, + "started_on": {"key": "properties.startedOn", "type": "iso-8601"}, + "ended_on": {"key": "properties.endedOn", "type": "iso-8601"}, + "migration_service": {"key": "properties.migrationService", "type": "str"}, + "migration_operation_id": {"key": "properties.migrationOperationId", "type": "str"}, + "migration_failure_error": {"key": "properties.migrationFailureError", "type": "ErrorInfo"}, + "provisioning_error": {"key": "properties.provisioningError", "type": "str"}, + "source_mongo_connection": {"key": "properties.sourceMongoConnection", "type": "MongoConnectionInformation"}, + "target_mongo_connection": {"key": "properties.targetMongoConnection", "type": "MongoConnectionInformation"}, + "collection_list": {"key": "properties.collectionList", "type": "[MongoMigrationCollection]"}, } def __init__( self, *, scope: Optional[str] = None, - source_sql_connection: Optional["_models.SqlConnectionInformation"] = None, - source_database_name: Optional[str] = None, migration_service: Optional[str] = None, migration_operation_id: Optional[str] = None, - target_database_collation: Optional[str] = None, provisioning_error: Optional[str] = None, - target_sql_connection: Optional["_models.SqlConnectionInformation"] = None, - table_list: Optional[List[str]] = None, - **kwargs - ): + source_mongo_connection: Optional["_models.MongoConnectionInformation"] = None, + target_mongo_connection: Optional["_models.MongoConnectionInformation"] = None, + collection_list: Optional[List["_models.MongoMigrationCollection"]] = None, + **kwargs: Any + ) -> None: """ - :keyword scope: Resource Id of the target resource (SQL VM or SQL 
Managed Instance). + :keyword scope: Resource Id of the target resource. :paramtype scope: str - :keyword source_sql_connection: Source SQL Server connection details. - :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation - :keyword source_database_name: Name of the source database. - :paramtype source_database_name: str :keyword migration_service: Resource Id of the Migration Service. :paramtype migration_service: str - :keyword migration_operation_id: ID tracking current migration operation. + :keyword migration_operation_id: ID for current migration operation. :paramtype migration_operation_id: str - :keyword target_database_collation: Database collation to be used for the target database. - :paramtype target_database_collation: str :keyword provisioning_error: Error message for migration provisioning failure, if any. :paramtype provisioning_error: str - :keyword target_sql_connection: Target SQL DB connection details. - :paramtype target_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation - :keyword table_list: List of tables to copy. - :paramtype table_list: list[str] + :keyword source_mongo_connection: Source Mongo connection details. + :paramtype source_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation + :keyword target_mongo_connection: Target Cosmos DB Mongo connection details. + :paramtype target_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation + :keyword collection_list: List of Mongo Collections to be migrated. 
+ :paramtype collection_list: list[~azure.mgmt.datamigration.models.MongoMigrationCollection] """ - super().__init__( - scope=scope, - source_sql_connection=source_sql_connection, - source_database_name=source_database_name, - migration_service=migration_service, - migration_operation_id=migration_operation_id, - target_database_collation=target_database_collation, - provisioning_error=provisioning_error, - **kwargs - ) - self.kind: str = "SqlDb" - self.migration_status_details = None - self.target_sql_connection = target_sql_connection - self.offline_configuration = None - self.table_list = table_list + super().__init__(**kwargs) + self.kind: Optional[str] = None + self.scope = scope + self.provisioning_state = None + self.migration_status = None + self.started_on = None + self.ended_on = None + self.migration_service = migration_service + self.migration_operation_id = migration_operation_id + self.migration_failure_error = None + self.provisioning_error = provisioning_error + self.source_mongo_connection = source_mongo_connection + self.target_mongo_connection = target_mongo_connection + self.collection_list = collection_list -class DatabaseMigrationPropertiesSqlMi(DatabaseMigrationProperties): # pylint: disable=too-many-instance-attributes - """Database Migration Resource properties for SQL Managed Instance. +class DatabaseMigrationCosmosDbMongoListResult(_serialization.Model): + """A list of Database Migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. 
+ :ivar value: + :vartype value: list[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :ivar next_link: + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[DatabaseMigrationCosmosDbMongo]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class DatabaseMigrationListResult(_serialization.Model): + """A list of Database Migrations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: + :vartype value: list[~azure.mgmt.datamigration.models.DatabaseMigration] + :ivar next_link: + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[DatabaseMigration]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class DatabaseMigrationProperties(DatabaseMigrationBaseProperties): # pylint: disable=too-many-instance-attributes + """Database Migration Resource properties. - :ivar kind: Required. Known values are: "SqlMi", "SqlVm", and "SqlDb". + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DatabaseMigrationPropertiesSqlDb, DatabaseMigrationPropertiesSqlMi, + DatabaseMigrationPropertiesSqlVm + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to server. + + :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo". 
:vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType - :ivar scope: Resource Id of the target resource (SQL VM or SQL Managed Instance). + :ivar scope: Resource Id of the target resource. :vartype scope: str :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded - implies that validations have been performed and migration has started. - :vartype provisioning_state: str + implies that validations have been performed and migration has started. Known values are: + "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState :ivar migration_status: Migration status. :vartype migration_status: str :ivar started_on: Database migration start time. :vartype started_on: ~datetime.datetime :ivar ended_on: Database migration end time. :vartype ended_on: ~datetime.datetime - :ivar source_sql_connection: Source SQL Server connection details. - :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation - :ivar source_database_name: Name of the source database. - :vartype source_database_name: str - :ivar source_server_name: Name of the source sql server. - :vartype source_server_name: str :ivar migration_service: Resource Id of the Migration Service. :vartype migration_service: str - :ivar migration_operation_id: ID tracking current migration operation. + :ivar migration_operation_id: ID for current migration operation. :vartype migration_operation_id: str :ivar migration_failure_error: Error details in case of migration failure. :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo - :ivar target_database_collation: Database collation to be used for the target database. - :vartype target_database_collation: str :ivar provisioning_error: Error message for migration provisioning failure, if any. 
:vartype provisioning_error: str + :ivar source_sql_connection: Source SQL Server connection details. + :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation + :ivar source_database_name: Name of the source database. + :vartype source_database_name: str + :ivar source_server_name: Name of the source sql server. + :vartype source_server_name: str + :ivar target_database_collation: Database collation to be used for the target database. + :vartype target_database_collation: str + """ + + _validation = { + "kind": {"required": True}, + "provisioning_state": {"readonly": True}, + "migration_status": {"readonly": True}, + "started_on": {"readonly": True}, + "ended_on": {"readonly": True}, + "migration_failure_error": {"readonly": True}, + "source_server_name": {"readonly": True}, + } + + _attribute_map = { + "kind": {"key": "kind", "type": "str"}, + "scope": {"key": "scope", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "migration_status": {"key": "migrationStatus", "type": "str"}, + "started_on": {"key": "startedOn", "type": "iso-8601"}, + "ended_on": {"key": "endedOn", "type": "iso-8601"}, + "migration_service": {"key": "migrationService", "type": "str"}, + "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, + "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"}, + "provisioning_error": {"key": "provisioningError", "type": "str"}, + "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"}, + "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, + "source_server_name": {"key": "sourceServerName", "type": "str"}, + "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"}, + } + + _subtype_map = { + "kind": { + "SqlDb": "DatabaseMigrationPropertiesSqlDb", + "SqlMi": "DatabaseMigrationPropertiesSqlMi", + "SqlVm": "DatabaseMigrationPropertiesSqlVm", + } + } + + def __init__( + 
self, + *, + scope: Optional[str] = None, + migration_service: Optional[str] = None, + migration_operation_id: Optional[str] = None, + provisioning_error: Optional[str] = None, + source_sql_connection: Optional["_models.SqlConnectionInformation"] = None, + source_database_name: Optional[str] = None, + target_database_collation: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword scope: Resource Id of the target resource. + :paramtype scope: str + :keyword migration_service: Resource Id of the Migration Service. + :paramtype migration_service: str + :keyword migration_operation_id: ID for current migration operation. + :paramtype migration_operation_id: str + :keyword provisioning_error: Error message for migration provisioning failure, if any. + :paramtype provisioning_error: str + :keyword source_sql_connection: Source SQL Server connection details. + :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation + :keyword source_database_name: Name of the source database. + :paramtype source_database_name: str + :keyword target_database_collation: Database collation to be used for the target database. + :paramtype target_database_collation: str + """ + super().__init__( + scope=scope, + migration_service=migration_service, + migration_operation_id=migration_operation_id, + provisioning_error=provisioning_error, + **kwargs + ) + self.kind: str = "DatabaseMigrationProperties" + self.source_sql_connection = source_sql_connection + self.source_database_name = source_database_name + self.source_server_name = None + self.target_database_collation = target_database_collation + + +class DatabaseMigrationPropertiesCosmosDbMongo( + DatabaseMigrationBaseProperties +): # pylint: disable=too-many-instance-attributes + """Database Migration Resource properties for CosmosDb for Mongo. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + All required parameters must be populated in order to send to server. + + :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo". + :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType + :ivar scope: Resource Id of the target resource. + :vartype scope: str + :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded + implies that validations have been performed and migration has started. Known values are: + "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState + :ivar migration_status: Migration status. + :vartype migration_status: str + :ivar started_on: Database migration start time. + :vartype started_on: ~datetime.datetime + :ivar ended_on: Database migration end time. + :vartype ended_on: ~datetime.datetime + :ivar migration_service: Resource Id of the Migration Service. + :vartype migration_service: str + :ivar migration_operation_id: ID for current migration operation. + :vartype migration_operation_id: str + :ivar migration_failure_error: Error details in case of migration failure. + :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo + :ivar provisioning_error: Error message for migration provisioning failure, if any. + :vartype provisioning_error: str + :ivar source_mongo_connection: Source Mongo connection details. + :vartype source_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation + :ivar target_mongo_connection: Target Cosmos DB Mongo connection details. + :vartype target_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation + :ivar collection_list: List of Mongo Collections to be migrated. 
+ :vartype collection_list: list[~azure.mgmt.datamigration.models.MongoMigrationCollection] + """ + + _validation = { + "kind": {"required": True}, + "provisioning_state": {"readonly": True}, + "migration_status": {"readonly": True}, + "started_on": {"readonly": True}, + "ended_on": {"readonly": True}, + "migration_failure_error": {"readonly": True}, + } + + _attribute_map = { + "kind": {"key": "kind", "type": "str"}, + "scope": {"key": "scope", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "migration_status": {"key": "migrationStatus", "type": "str"}, + "started_on": {"key": "startedOn", "type": "iso-8601"}, + "ended_on": {"key": "endedOn", "type": "iso-8601"}, + "migration_service": {"key": "migrationService", "type": "str"}, + "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, + "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"}, + "provisioning_error": {"key": "provisioningError", "type": "str"}, + "source_mongo_connection": {"key": "sourceMongoConnection", "type": "MongoConnectionInformation"}, + "target_mongo_connection": {"key": "targetMongoConnection", "type": "MongoConnectionInformation"}, + "collection_list": {"key": "collectionList", "type": "[MongoMigrationCollection]"}, + } + + def __init__( + self, + *, + scope: Optional[str] = None, + migration_service: Optional[str] = None, + migration_operation_id: Optional[str] = None, + provisioning_error: Optional[str] = None, + source_mongo_connection: Optional["_models.MongoConnectionInformation"] = None, + target_mongo_connection: Optional["_models.MongoConnectionInformation"] = None, + collection_list: Optional[List["_models.MongoMigrationCollection"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword scope: Resource Id of the target resource. + :paramtype scope: str + :keyword migration_service: Resource Id of the Migration Service. 
+ :paramtype migration_service: str + :keyword migration_operation_id: ID for current migration operation. + :paramtype migration_operation_id: str + :keyword provisioning_error: Error message for migration provisioning failure, if any. + :paramtype provisioning_error: str + :keyword source_mongo_connection: Source Mongo connection details. + :paramtype source_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation + :keyword target_mongo_connection: Target Cosmos DB Mongo connection details. + :paramtype target_mongo_connection: ~azure.mgmt.datamigration.models.MongoConnectionInformation + :keyword collection_list: List of Mongo Collections to be migrated. + :paramtype collection_list: list[~azure.mgmt.datamigration.models.MongoMigrationCollection] + """ + super().__init__( + scope=scope, + migration_service=migration_service, + migration_operation_id=migration_operation_id, + provisioning_error=provisioning_error, + **kwargs + ) + self.kind: str = "MongoToCosmosDbMongo" + self.source_mongo_connection = source_mongo_connection + self.target_mongo_connection = target_mongo_connection + self.collection_list = collection_list + + +class DatabaseMigrationPropertiesSqlDb(DatabaseMigrationProperties): # pylint: disable=too-many-instance-attributes + """Database Migration Resource properties for SQL database. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to server. + + :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo". + :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType + :ivar scope: Resource Id of the target resource. + :vartype scope: str + :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded + implies that validations have been performed and migration has started. 
Known values are: + "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState + :ivar migration_status: Migration status. + :vartype migration_status: str + :ivar started_on: Database migration start time. + :vartype started_on: ~datetime.datetime + :ivar ended_on: Database migration end time. + :vartype ended_on: ~datetime.datetime + :ivar migration_service: Resource Id of the Migration Service. + :vartype migration_service: str + :ivar migration_operation_id: ID for current migration operation. + :vartype migration_operation_id: str + :ivar migration_failure_error: Error details in case of migration failure. + :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo + :ivar provisioning_error: Error message for migration provisioning failure, if any. + :vartype provisioning_error: str + :ivar source_sql_connection: Source SQL Server connection details. + :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation + :ivar source_database_name: Name of the source database. + :vartype source_database_name: str + :ivar source_server_name: Name of the source sql server. + :vartype source_server_name: str + :ivar target_database_collation: Database collation to be used for the target database. + :vartype target_database_collation: str + :ivar migration_status_details: Detailed migration status. Not included by default. + :vartype migration_status_details: ~azure.mgmt.datamigration.models.SqlDbMigrationStatusDetails + :ivar target_sql_connection: Target SQL DB connection details. + :vartype target_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation + :ivar offline_configuration: Offline configuration. + :vartype offline_configuration: ~azure.mgmt.datamigration.models.SqlDbOfflineConfiguration + :ivar table_list: List of tables to copy. 
+ :vartype table_list: list[str] + """ + + _validation = { + "kind": {"required": True}, + "provisioning_state": {"readonly": True}, + "migration_status": {"readonly": True}, + "started_on": {"readonly": True}, + "ended_on": {"readonly": True}, + "migration_failure_error": {"readonly": True}, + "source_server_name": {"readonly": True}, + "migration_status_details": {"readonly": True}, + "offline_configuration": {"readonly": True}, + } + + _attribute_map = { + "kind": {"key": "kind", "type": "str"}, + "scope": {"key": "scope", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "migration_status": {"key": "migrationStatus", "type": "str"}, + "started_on": {"key": "startedOn", "type": "iso-8601"}, + "ended_on": {"key": "endedOn", "type": "iso-8601"}, + "migration_service": {"key": "migrationService", "type": "str"}, + "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, + "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"}, + "provisioning_error": {"key": "provisioningError", "type": "str"}, + "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"}, + "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, + "source_server_name": {"key": "sourceServerName", "type": "str"}, + "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"}, + "migration_status_details": {"key": "migrationStatusDetails", "type": "SqlDbMigrationStatusDetails"}, + "target_sql_connection": {"key": "targetSqlConnection", "type": "SqlConnectionInformation"}, + "offline_configuration": {"key": "offlineConfiguration", "type": "SqlDbOfflineConfiguration"}, + "table_list": {"key": "tableList", "type": "[str]"}, + } + + def __init__( + self, + *, + scope: Optional[str] = None, + migration_service: Optional[str] = None, + migration_operation_id: Optional[str] = None, + provisioning_error: Optional[str] = None, + source_sql_connection: 
Optional["_models.SqlConnectionInformation"] = None, + source_database_name: Optional[str] = None, + target_database_collation: Optional[str] = None, + target_sql_connection: Optional["_models.SqlConnectionInformation"] = None, + table_list: Optional[List[str]] = None, + **kwargs: Any + ) -> None: + """ + :keyword scope: Resource Id of the target resource. + :paramtype scope: str + :keyword migration_service: Resource Id of the Migration Service. + :paramtype migration_service: str + :keyword migration_operation_id: ID for current migration operation. + :paramtype migration_operation_id: str + :keyword provisioning_error: Error message for migration provisioning failure, if any. + :paramtype provisioning_error: str + :keyword source_sql_connection: Source SQL Server connection details. + :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation + :keyword source_database_name: Name of the source database. + :paramtype source_database_name: str + :keyword target_database_collation: Database collation to be used for the target database. + :paramtype target_database_collation: str + :keyword target_sql_connection: Target SQL DB connection details. + :paramtype target_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation + :keyword table_list: List of tables to copy. 
+ :paramtype table_list: list[str] + """ + super().__init__( + scope=scope, + migration_service=migration_service, + migration_operation_id=migration_operation_id, + provisioning_error=provisioning_error, + source_sql_connection=source_sql_connection, + source_database_name=source_database_name, + target_database_collation=target_database_collation, + **kwargs + ) + self.kind: str = "SqlDb" + self.migration_status_details = None + self.target_sql_connection = target_sql_connection + self.offline_configuration = None + self.table_list = table_list + + +class DatabaseMigrationPropertiesSqlMi(DatabaseMigrationProperties): # pylint: disable=too-many-instance-attributes + """Database Migration Resource properties for SQL Managed Instance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to server. + + :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo". + :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType + :ivar scope: Resource Id of the target resource. + :vartype scope: str + :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded + implies that validations have been performed and migration has started. Known values are: + "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState + :ivar migration_status: Migration status. + :vartype migration_status: str + :ivar started_on: Database migration start time. + :vartype started_on: ~datetime.datetime + :ivar ended_on: Database migration end time. + :vartype ended_on: ~datetime.datetime + :ivar migration_service: Resource Id of the Migration Service. + :vartype migration_service: str + :ivar migration_operation_id: ID for current migration operation. 
+ :vartype migration_operation_id: str + :ivar migration_failure_error: Error details in case of migration failure. + :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo + :ivar provisioning_error: Error message for migration provisioning failure, if any. + :vartype provisioning_error: str + :ivar source_sql_connection: Source SQL Server connection details. + :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation + :ivar source_database_name: Name of the source database. + :vartype source_database_name: str + :ivar source_server_name: Name of the source sql server. + :vartype source_server_name: str + :ivar target_database_collation: Database collation to be used for the target database. + :vartype target_database_collation: str :ivar migration_status_details: Detailed migration status. Not included by default. :vartype migration_status_details: ~azure.mgmt.datamigration.models.MigrationStatusDetails :ivar backup_configuration: Backup configuration info. 
@@ -3911,8 +4397,8 @@ class DatabaseMigrationPropertiesSqlMi(DatabaseMigrationProperties): # pylint: "migration_status": {"readonly": True}, "started_on": {"readonly": True}, "ended_on": {"readonly": True}, - "source_server_name": {"readonly": True}, "migration_failure_error": {"readonly": True}, + "source_server_name": {"readonly": True}, "migration_status_details": {"readonly": True}, } @@ -3923,14 +4409,14 @@ class DatabaseMigrationPropertiesSqlMi(DatabaseMigrationProperties): # pylint: "migration_status": {"key": "migrationStatus", "type": "str"}, "started_on": {"key": "startedOn", "type": "iso-8601"}, "ended_on": {"key": "endedOn", "type": "iso-8601"}, - "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"}, - "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, - "source_server_name": {"key": "sourceServerName", "type": "str"}, "migration_service": {"key": "migrationService", "type": "str"}, "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"}, - "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"}, "provisioning_error": {"key": "provisioningError", "type": "str"}, + "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"}, + "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, + "source_server_name": {"key": "sourceServerName", "type": "str"}, + "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"}, "migration_status_details": {"key": "migrationStatusDetails", "type": "MigrationStatusDetails"}, "backup_configuration": {"key": "backupConfiguration", "type": "BackupConfiguration"}, "offline_configuration": {"key": "offlineConfiguration", "type": "OfflineConfiguration"}, @@ -3940,31 +4426,31 @@ def __init__( self, *, scope: Optional[str] = None, - source_sql_connection: 
Optional["_models.SqlConnectionInformation"] = None, - source_database_name: Optional[str] = None, migration_service: Optional[str] = None, migration_operation_id: Optional[str] = None, - target_database_collation: Optional[str] = None, provisioning_error: Optional[str] = None, + source_sql_connection: Optional["_models.SqlConnectionInformation"] = None, + source_database_name: Optional[str] = None, + target_database_collation: Optional[str] = None, backup_configuration: Optional["_models.BackupConfiguration"] = None, offline_configuration: Optional["_models.OfflineConfiguration"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword scope: Resource Id of the target resource (SQL VM or SQL Managed Instance). + :keyword scope: Resource Id of the target resource. :paramtype scope: str + :keyword migration_service: Resource Id of the Migration Service. + :paramtype migration_service: str + :keyword migration_operation_id: ID for current migration operation. + :paramtype migration_operation_id: str + :keyword provisioning_error: Error message for migration provisioning failure, if any. + :paramtype provisioning_error: str :keyword source_sql_connection: Source SQL Server connection details. :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation :keyword source_database_name: Name of the source database. :paramtype source_database_name: str - :keyword migration_service: Resource Id of the Migration Service. - :paramtype migration_service: str - :keyword migration_operation_id: ID tracking current migration operation. - :paramtype migration_operation_id: str :keyword target_database_collation: Database collation to be used for the target database. :paramtype target_database_collation: str - :keyword provisioning_error: Error message for migration provisioning failure, if any. - :paramtype provisioning_error: str :keyword backup_configuration: Backup configuration info. 
:paramtype backup_configuration: ~azure.mgmt.datamigration.models.BackupConfiguration :keyword offline_configuration: Offline configuration. @@ -3972,12 +4458,12 @@ def __init__( """ super().__init__( scope=scope, - source_sql_connection=source_sql_connection, - source_database_name=source_database_name, migration_service=migration_service, migration_operation_id=migration_operation_id, - target_database_collation=target_database_collation, provisioning_error=provisioning_error, + source_sql_connection=source_sql_connection, + source_database_name=source_database_name, + target_database_collation=target_database_collation, **kwargs ) self.kind: str = "SqlMi" @@ -3991,37 +4477,38 @@ class DatabaseMigrationPropertiesSqlVm(DatabaseMigrationProperties): # pylint: Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. - :ivar kind: Required. Known values are: "SqlMi", "SqlVm", and "SqlDb". + :ivar kind: Required. Known values are: "SqlMi", "SqlVm", "SqlDb", and "MongoToCosmosDbMongo". :vartype kind: str or ~azure.mgmt.datamigration.models.ResourceType - :ivar scope: Resource Id of the target resource (SQL VM or SQL Managed Instance). + :ivar scope: Resource Id of the target resource. :vartype scope: str :ivar provisioning_state: Provisioning State of migration. ProvisioningState as Succeeded - implies that validations have been performed and migration has started. - :vartype provisioning_state: str + implies that validations have been performed and migration has started. Known values are: + "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState :ivar migration_status: Migration status. :vartype migration_status: str :ivar started_on: Database migration start time. 
:vartype started_on: ~datetime.datetime :ivar ended_on: Database migration end time. :vartype ended_on: ~datetime.datetime + :ivar migration_service: Resource Id of the Migration Service. + :vartype migration_service: str + :ivar migration_operation_id: ID for current migration operation. + :vartype migration_operation_id: str + :ivar migration_failure_error: Error details in case of migration failure. + :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo + :ivar provisioning_error: Error message for migration provisioning failure, if any. + :vartype provisioning_error: str :ivar source_sql_connection: Source SQL Server connection details. :vartype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation :ivar source_database_name: Name of the source database. :vartype source_database_name: str :ivar source_server_name: Name of the source sql server. :vartype source_server_name: str - :ivar migration_service: Resource Id of the Migration Service. - :vartype migration_service: str - :ivar migration_operation_id: ID tracking current migration operation. - :vartype migration_operation_id: str - :ivar migration_failure_error: Error details in case of migration failure. - :vartype migration_failure_error: ~azure.mgmt.datamigration.models.ErrorInfo :ivar target_database_collation: Database collation to be used for the target database. :vartype target_database_collation: str - :ivar provisioning_error: Error message for migration provisioning failure, if any. - :vartype provisioning_error: str :ivar migration_status_details: Detailed migration status. Not included by default. :vartype migration_status_details: ~azure.mgmt.datamigration.models.MigrationStatusDetails :ivar backup_configuration: Backup configuration info. 
@@ -4036,8 +4523,8 @@ class DatabaseMigrationPropertiesSqlVm(DatabaseMigrationProperties): # pylint: "migration_status": {"readonly": True}, "started_on": {"readonly": True}, "ended_on": {"readonly": True}, - "source_server_name": {"readonly": True}, "migration_failure_error": {"readonly": True}, + "source_server_name": {"readonly": True}, "migration_status_details": {"readonly": True}, } @@ -4048,14 +4535,14 @@ class DatabaseMigrationPropertiesSqlVm(DatabaseMigrationProperties): # pylint: "migration_status": {"key": "migrationStatus", "type": "str"}, "started_on": {"key": "startedOn", "type": "iso-8601"}, "ended_on": {"key": "endedOn", "type": "iso-8601"}, - "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"}, - "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, - "source_server_name": {"key": "sourceServerName", "type": "str"}, "migration_service": {"key": "migrationService", "type": "str"}, "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, "migration_failure_error": {"key": "migrationFailureError", "type": "ErrorInfo"}, - "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"}, "provisioning_error": {"key": "provisioningError", "type": "str"}, + "source_sql_connection": {"key": "sourceSqlConnection", "type": "SqlConnectionInformation"}, + "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, + "source_server_name": {"key": "sourceServerName", "type": "str"}, + "target_database_collation": {"key": "targetDatabaseCollation", "type": "str"}, "migration_status_details": {"key": "migrationStatusDetails", "type": "MigrationStatusDetails"}, "backup_configuration": {"key": "backupConfiguration", "type": "BackupConfiguration"}, "offline_configuration": {"key": "offlineConfiguration", "type": "OfflineConfiguration"}, @@ -4065,31 +4552,31 @@ def __init__( self, *, scope: Optional[str] = None, - source_sql_connection: 
Optional["_models.SqlConnectionInformation"] = None, - source_database_name: Optional[str] = None, migration_service: Optional[str] = None, migration_operation_id: Optional[str] = None, - target_database_collation: Optional[str] = None, provisioning_error: Optional[str] = None, + source_sql_connection: Optional["_models.SqlConnectionInformation"] = None, + source_database_name: Optional[str] = None, + target_database_collation: Optional[str] = None, backup_configuration: Optional["_models.BackupConfiguration"] = None, offline_configuration: Optional["_models.OfflineConfiguration"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword scope: Resource Id of the target resource (SQL VM or SQL Managed Instance). + :keyword scope: Resource Id of the target resource. :paramtype scope: str + :keyword migration_service: Resource Id of the Migration Service. + :paramtype migration_service: str + :keyword migration_operation_id: ID for current migration operation. + :paramtype migration_operation_id: str + :keyword provisioning_error: Error message for migration provisioning failure, if any. + :paramtype provisioning_error: str :keyword source_sql_connection: Source SQL Server connection details. :paramtype source_sql_connection: ~azure.mgmt.datamigration.models.SqlConnectionInformation :keyword source_database_name: Name of the source database. :paramtype source_database_name: str - :keyword migration_service: Resource Id of the Migration Service. - :paramtype migration_service: str - :keyword migration_operation_id: ID tracking current migration operation. - :paramtype migration_operation_id: str :keyword target_database_collation: Database collation to be used for the target database. :paramtype target_database_collation: str - :keyword provisioning_error: Error message for migration provisioning failure, if any. - :paramtype provisioning_error: str :keyword backup_configuration: Backup configuration info. 
:paramtype backup_configuration: ~azure.mgmt.datamigration.models.BackupConfiguration :keyword offline_configuration: Offline configuration. @@ -4097,12 +4584,12 @@ def __init__( """ super().__init__( scope=scope, - source_sql_connection=source_sql_connection, - source_database_name=source_database_name, migration_service=migration_service, migration_operation_id=migration_operation_id, - target_database_collation=target_database_collation, provisioning_error=provisioning_error, + source_sql_connection=source_sql_connection, + source_database_name=source_database_name, + target_database_collation=target_database_collation, **kwargs ) self.kind: str = "SqlVm" @@ -4111,7 +4598,45 @@ def __init__( self.offline_configuration = offline_configuration -class DatabaseMigrationSqlDb(ProxyResource): +class ProxyResourceAutoGenerated(_serialization.Model): + """ProxyResourceAutoGenerated. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: + :vartype id: str + :ivar name: + :vartype name: str + :ivar type: + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.system_data = None + + +class DatabaseMigrationSqlDb(ProxyResourceAutoGenerated): """Database Migration Resource for SQL Database. 
Variables are only populated by the server, and will be ignored when sending a request. @@ -4123,7 +4648,7 @@ class DatabaseMigrationSqlDb(ProxyResource): :ivar type: :vartype type: str :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated :ivar properties: Database Migration Resource properties for SQL database. :vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlDb """ @@ -4139,21 +4664,22 @@ class DatabaseMigrationSqlDb(ProxyResource): "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, "properties": {"key": "properties", "type": "DatabaseMigrationPropertiesSqlDb"}, } - def __init__(self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlDb"] = None, **kwargs): + def __init__( + self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlDb"] = None, **kwargs: Any + ) -> None: """ :keyword properties: Database Migration Resource properties for SQL database. :paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlDb """ super().__init__(**kwargs) - self.system_data = None self.properties = properties -class DatabaseMigrationSqlMi(ProxyResource): +class DatabaseMigrationSqlMi(ProxyResourceAutoGenerated): """Database Migration Resource for SQL Managed Instance. Variables are only populated by the server, and will be ignored when sending a request. @@ -4165,7 +4691,7 @@ class DatabaseMigrationSqlMi(ProxyResource): :ivar type: :vartype type: str :ivar system_data: Metadata pertaining to creation and last modification of the resource. 
- :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated :ivar properties: Database Migration Resource properties for SQL Managed Instance. :vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlMi """ @@ -4181,21 +4707,22 @@ class DatabaseMigrationSqlMi(ProxyResource): "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, "properties": {"key": "properties", "type": "DatabaseMigrationPropertiesSqlMi"}, } - def __init__(self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlMi"] = None, **kwargs): + def __init__( + self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlMi"] = None, **kwargs: Any + ) -> None: """ :keyword properties: Database Migration Resource properties for SQL Managed Instance. :paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlMi """ super().__init__(**kwargs) - self.system_data = None self.properties = properties -class DatabaseMigrationSqlVm(ProxyResource): +class DatabaseMigrationSqlVm(ProxyResourceAutoGenerated): """Database Migration Resource for SQL Virtual Machine. Variables are only populated by the server, and will be ignored when sending a request. @@ -4207,7 +4734,7 @@ class DatabaseMigrationSqlVm(ProxyResource): :ivar type: :vartype type: str :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated :ivar properties: Database Migration Resource properties for SQL Virtual Machine. 
:vartype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlVm """ @@ -4223,17 +4750,18 @@ class DatabaseMigrationSqlVm(ProxyResource): "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, "properties": {"key": "properties", "type": "DatabaseMigrationPropertiesSqlVm"}, } - def __init__(self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlVm"] = None, **kwargs): + def __init__( + self, *, properties: Optional["_models.DatabaseMigrationPropertiesSqlVm"] = None, **kwargs: Any + ) -> None: """ :keyword properties: Database Migration Resource properties for SQL Virtual Machine. :paramtype properties: ~azure.mgmt.datamigration.models.DatabaseMigrationPropertiesSqlVm """ super().__init__(**kwargs) - self.system_data = None self.properties = properties @@ -4266,7 +4794,7 @@ class DatabaseObjectName(_serialization.Model): "object_type": {"key": "objectType", "type": "str"}, } - def __init__(self, *, object_type: Optional[Union[str, "_models.ObjectType"]] = None, **kwargs): + def __init__(self, *, object_type: Optional[Union[str, "_models.ObjectType"]] = None, **kwargs: Any) -> None: """ :keyword object_type: Type of the object in the database. Known values are: "StoredProcedures", "Table", "User", "View", and "Function". 
@@ -4330,7 +4858,7 @@ class DataItemMigrationSummaryResult(_serialization.Model): "result_prefix": {"key": "resultPrefix", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.name = None @@ -4399,7 +4927,7 @@ class DatabaseSummaryResult(DataItemMigrationSummaryResult): "size_mb": {"key": "sizeMB", "type": "float"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.size_mb = None @@ -4426,7 +4954,7 @@ class DatabaseTable(_serialization.Model): "name": {"key": "name", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.has_rows = None @@ -4453,8 +4981,8 @@ def __init__( *, failed_objects: Optional[Dict[str, str]] = None, validation_errors: Optional["_models.ValidationError"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword failed_objects: List of failed table names of source and target pair. :paramtype failed_objects: dict[str, str] @@ -4487,7 +5015,7 @@ class DataMigrationError(_serialization.Model): "type": {"key": "type", "type": "str"}, } - def __init__(self, *, type: Optional[Union[str, "_models.ErrorType"]] = None, **kwargs): + def __init__(self, *, type: Optional[Union[str, "_models.ErrorType"]] = None, **kwargs: Any) -> None: """ :keyword type: Error type. Known values are: "Default", "Warning", and "Error". 
:paramtype type: str or ~azure.mgmt.datamigration.models.ErrorType @@ -4543,7 +5071,7 @@ class DataMigrationProjectMetadata(_serialization.Model): "selected_migration_tables": {"key": "selectedMigrationTables", "type": "[MigrationTableMetadata]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.source_server_name = None @@ -4556,8 +5084,8 @@ def __init__(self, **kwargs): self.selected_migration_tables = None -class TrackedResource(_serialization.Model): - """TrackedResource. +class TrackedResourceAutoGenerated(_serialization.Model): + """TrackedResourceAutoGenerated. Variables are only populated by the server, and will be ignored when sending a request. @@ -4572,7 +5100,7 @@ class TrackedResource(_serialization.Model): :ivar type: :vartype type: str :ivar system_data: - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated """ _validation = { @@ -4588,10 +5116,10 @@ class TrackedResource(_serialization.Model): "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, } - def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword location: :paramtype location: str @@ -4607,8 +5135,8 @@ def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, s self.system_data = None -class DataMigrationService(TrackedResource): # pylint: disable=too-many-instance-attributes - """A Database Migration Service resource. 
+class DataMigrationService(TrackedResourceAutoGenerated): # pylint: disable=too-many-instance-attributes + """An Azure Database Migration Service (classic) resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -4623,7 +5151,7 @@ class DataMigrationService(TrackedResource): # pylint: disable=too-many-instanc :ivar type: :vartype type: str :ivar system_data: - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated :ivar etag: HTTP strong entity tag value. Ignored if submitted. :vartype etag: str :ivar kind: The resource kind. Only 'vm' (the default) is supported. @@ -4663,7 +5191,7 @@ class DataMigrationService(TrackedResource): # pylint: disable=too-many-instanc "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, "etag": {"key": "etag", "type": "str"}, "kind": {"key": "kind", "type": "str"}, "sku": {"key": "sku", "type": "ServiceSku"}, @@ -4688,8 +5216,8 @@ def __init__( virtual_nic_id: Optional[str] = None, auto_stop_delay: Optional[str] = None, delete_resources_on_stop: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword location: :paramtype location: str @@ -4743,8 +5271,12 @@ class DataMigrationServiceList(_serialization.Model): } def __init__( - self, *, value: Optional[List["_models.DataMigrationService"]] = None, next_link: Optional[str] = None, **kwargs - ): + self, + *, + value: Optional[List["_models.DataMigrationService"]] = None, + next_link: Optional[str] = None, + **kwargs: Any + ) -> None: """ :keyword value: List of services. 
:paramtype value: list[~azure.mgmt.datamigration.models.DataMigrationService] @@ -4789,8 +5321,8 @@ def __init__( status: Optional[str] = None, vm_size: Optional[str] = None, supported_task_types: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword agent_version: The DMS instance agent version. :paramtype agent_version: str @@ -4827,7 +5359,9 @@ class DeleteNode(_serialization.Model): "integration_runtime_name": {"key": "integrationRuntimeName", "type": "str"}, } - def __init__(self, *, node_name: Optional[str] = None, integration_runtime_name: Optional[str] = None, **kwargs): + def __init__( + self, *, node_name: Optional[str] = None, integration_runtime_name: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword node_name: The name of node to delete. :paramtype node_name: str @@ -4839,6 +5373,77 @@ def __init__(self, *, node_name: Optional[str] = None, integration_runtime_name: self.integration_runtime_name = integration_runtime_name +class ErrorAdditionalInfo(_serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: JSON + """ + + _validation = { + "type": {"readonly": True}, + "info": {"readonly": True}, + } + + _attribute_map = { + "type": {"key": "type", "type": "str"}, + "info": {"key": "info", "type": "object"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorDetail(_serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. 
+ :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.datamigration.models.ErrorDetail] + :ivar additional_info: The error additional info. + :vartype additional_info: list[~azure.mgmt.datamigration.models.ErrorAdditionalInfo] + """ + + _validation = { + "code": {"readonly": True}, + "message": {"readonly": True}, + "target": {"readonly": True}, + "details": {"readonly": True}, + "additional_info": {"readonly": True}, + } + + _attribute_map = { + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetail]"}, + "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + class ErrorInfo(_serialization.Model): """Error details. @@ -4860,13 +5465,34 @@ class ErrorInfo(_serialization.Model): "message": {"key": "message", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.code = None self.message = None +class ErrorResponse(_serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed + operations. (This also follows the OData error response format.). + + :ivar error: The error object. + :vartype error: ~azure.mgmt.datamigration.models.ErrorDetail + """ + + _attribute_map = { + "error": {"key": "error", "type": "ErrorDetail"}, + } + + def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: Any) -> None: + """ + :keyword error: The error object. 
+ :paramtype error: ~azure.mgmt.datamigration.models.ErrorDetail + """ + super().__init__(**kwargs) + self.error = error + + class ExecutionStatistics(_serialization.Model): """Description about the errors happen while performing migration validation. @@ -4902,8 +5528,8 @@ def __init__( wait_stats: Optional[Dict[str, "_models.WaitStatistics"]] = None, has_errors: Optional[bool] = None, sql_errors: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword execution_count: No. of query executions. :paramtype execution_count: int @@ -4943,8 +5569,8 @@ class FileList(_serialization.Model): } def __init__( - self, *, value: Optional[List["_models.ProjectFile"]] = None, next_link: Optional[str] = None, **kwargs - ): + self, *, value: Optional[List["_models.ProjectFile"]] = None, next_link: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword value: List of files. :paramtype value: list[~azure.mgmt.datamigration.models.ProjectFile] @@ -4959,7 +5585,7 @@ def __init__( class FileShare(_serialization.Model): """File share information with Path, Username, and Password. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar user_name: User name credential to connect to the share location. :vartype user_name: str @@ -4979,7 +5605,9 @@ class FileShare(_serialization.Model): "path": {"key": "path", "type": "str"}, } - def __init__(self, *, path: str, user_name: Optional[str] = None, password: Optional[str] = None, **kwargs): + def __init__( + self, *, path: str, user_name: Optional[str] = None, password: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword user_name: User name credential to connect to the share location. 
:paramtype user_name: str @@ -5008,7 +5636,7 @@ class FileStorageInfo(_serialization.Model): "headers": {"key": "headers", "type": "{str}"}, } - def __init__(self, *, uri: Optional[str] = None, headers: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, uri: Optional[str] = None, headers: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword uri: A URI that can be used to access the file content. :paramtype uri: str @@ -5023,7 +5651,7 @@ def __init__(self, *, uri: Optional[str] = None, headers: Optional[Dict[str, str class GetProjectDetailsNonSqlTaskInput(_serialization.Model): """Input for the task that reads configuration from project artifacts. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar project_name: Name of the migration project. Required. :vartype project_name: str @@ -5042,7 +5670,7 @@ class GetProjectDetailsNonSqlTaskInput(_serialization.Model): "project_location": {"key": "projectLocation", "type": "str"}, } - def __init__(self, *, project_name: str, project_location: str, **kwargs): + def __init__(self, *, project_name: str, project_location: str, **kwargs: Any) -> None: """ :keyword project_name: Name of the migration project. Required. :paramtype project_name: str @@ -5058,7 +5686,7 @@ def __init__(self, *, project_name: str, project_location: str, **kwargs): class GetTdeCertificatesSqlTaskInput(_serialization.Model): """Input for the task that gets TDE certificates in Base64 encoded format. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar connection_info: Connection information for SQL Server. Required. 
:vartype connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -5088,8 +5716,8 @@ def __init__( connection_info: "_models.SqlConnectionInfo", backup_file_share: "_models.FileShare", selected_certificates: List["_models.SelectedCertificateInput"], - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword connection_info: Connection information for SQL Server. Required. :paramtype connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -5128,7 +5756,7 @@ class GetTdeCertificatesSqlTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.base64_encoded_certificates = None @@ -5140,7 +5768,7 @@ class GetTdeCertificatesSqlTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -5199,8 +5827,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.GetTdeCertificatesSqlTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -5216,7 +5844,7 @@ def __init__( class GetUserTablesMySqlTaskInput(_serialization.Model): """Input for the task that collects user tables for the given list of databases. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar connection_info: Connection information for SQL Server. Required. 
:vartype connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -5234,7 +5862,9 @@ class GetUserTablesMySqlTaskInput(_serialization.Model): "selected_databases": {"key": "selectedDatabases", "type": "[str]"}, } - def __init__(self, *, connection_info: "_models.MySqlConnectionInfo", selected_databases: List[str], **kwargs): + def __init__( + self, *, connection_info: "_models.MySqlConnectionInfo", selected_databases: List[str], **kwargs: Any + ) -> None: """ :keyword connection_info: Connection information for SQL Server. Required. :paramtype connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -5271,7 +5901,7 @@ class GetUserTablesMySqlTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -5284,7 +5914,7 @@ class GetUserTablesMySqlTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -5343,8 +5973,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.GetUserTablesMySqlTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -5358,9 +5988,10 @@ def __init__( class GetUserTablesOracleTaskInput(_serialization.Model): - """Input for the task that gets the list of tables contained within a provided list of Oracle schemas. 
+ """Input for the task that gets the list of tables contained within a provided list of Oracle + schemas. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar connection_info: Information for connecting to Oracle source. Required. :vartype connection_info: ~azure.mgmt.datamigration.models.OracleConnectionInfo @@ -5378,7 +6009,9 @@ class GetUserTablesOracleTaskInput(_serialization.Model): "selected_schemas": {"key": "selectedSchemas", "type": "[str]"}, } - def __init__(self, *, connection_info: "_models.OracleConnectionInfo", selected_schemas: List[str], **kwargs): + def __init__( + self, *, connection_info: "_models.OracleConnectionInfo", selected_schemas: List[str], **kwargs: Any + ) -> None: """ :keyword connection_info: Information for connecting to Oracle source. Required. :paramtype connection_info: ~azure.mgmt.datamigration.models.OracleConnectionInfo @@ -5391,7 +6024,8 @@ def __init__(self, *, connection_info: "_models.OracleConnectionInfo", selected_ class GetUserTablesOracleTaskOutput(_serialization.Model): - """Output for the task that gets the list of tables contained within a provided list of Oracle schemas. + """Output for the task that gets the list of tables contained within a provided list of Oracle + schemas. Variables are only populated by the server, and will be ignored when sending a request. @@ -5415,7 +6049,7 @@ class GetUserTablesOracleTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.schema_name = None @@ -5428,7 +6062,7 @@ class GetUserTablesOracleTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. 
+ All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -5487,8 +6121,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.GetUserTablesOracleTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -5504,7 +6138,7 @@ def __init__( class GetUserTablesPostgreSqlTaskInput(_serialization.Model): """Input for the task that gets the list of tables for a provided list of PostgreSQL databases. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar connection_info: Information for connecting to PostgreSQL source. Required. :vartype connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo @@ -5522,7 +6156,9 @@ class GetUserTablesPostgreSqlTaskInput(_serialization.Model): "selected_databases": {"key": "selectedDatabases", "type": "[str]"}, } - def __init__(self, *, connection_info: "_models.PostgreSqlConnectionInfo", selected_databases: List[str], **kwargs): + def __init__( + self, *, connection_info: "_models.PostgreSqlConnectionInfo", selected_databases: List[str], **kwargs: Any + ) -> None: """ :keyword connection_info: Information for connecting to PostgreSQL source. Required. 
:paramtype connection_info: ~azure.mgmt.datamigration.models.PostgreSqlConnectionInfo @@ -5560,7 +6196,7 @@ class GetUserTablesPostgreSqlTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.database_name = None @@ -5573,7 +6209,7 @@ class GetUserTablesPostgreSqlTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -5632,8 +6268,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.GetUserTablesPostgreSqlTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -5649,7 +6285,7 @@ def __init__( class GetUserTablesSqlSyncTaskInput(_serialization.Model): """Input for the task that collects user tables for the given list of databases. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Connection information for SQL Server. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -5682,8 +6318,8 @@ def __init__( target_connection_info: "_models.SqlConnectionInfo", selected_source_databases: List[str], selected_target_databases: List[str], - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Connection information for SQL Server. Required. 
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -5732,7 +6368,7 @@ class GetUserTablesSqlSyncTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.databases_to_source_tables = None @@ -5746,7 +6382,7 @@ class GetUserTablesSqlSyncTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -5805,8 +6441,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.GetUserTablesSqlSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -5822,7 +6458,7 @@ def __init__( class GetUserTablesSqlTaskInput(_serialization.Model): """Input for the task that collects user tables for the given list of databases. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar connection_info: Connection information for SQL Server. Required. :vartype connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -5849,8 +6485,8 @@ def __init__( connection_info: "_models.SqlConnectionInfo", selected_databases: List[str], encrypted_key_for_secure_fields: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword connection_info: Connection information for SQL Server. Required. 
:paramtype connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -5890,7 +6526,7 @@ class GetUserTablesSqlTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -5903,7 +6539,7 @@ class GetUserTablesSqlTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -5966,8 +6602,8 @@ def __init__( client_data: Optional[Dict[str, str]] = None, input: Optional["_models.GetUserTablesSqlTaskInput"] = None, task_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -5994,7 +6630,7 @@ class InstallOCIDriverTaskInput(_serialization.Model): "driver_package_name": {"key": "driverPackageName", "type": "str"}, } - def __init__(self, *, driver_package_name: Optional[str] = None, **kwargs): + def __init__(self, *, driver_package_name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword driver_package_name: Name of the uploaded driver package to install. 
:paramtype driver_package_name: str @@ -6020,7 +6656,7 @@ class InstallOCIDriverTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.validation_errors = None @@ -6031,7 +6667,7 @@ class InstallOCIDriverTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -6090,8 +6726,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.InstallOCIDriverTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -6125,7 +6761,7 @@ class IntegrationRuntimeMonitoringData(_serialization.Model): "nodes": {"key": "nodes", "type": "[NodeMonitoringData]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.name = None @@ -6135,7 +6771,7 @@ def __init__(self, **kwargs): class MigrateMISyncCompleteCommandInput(_serialization.Model): """Input for command that completes online migration for an Azure SQL Database Managed Instance. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_database_name: Name of managed instance database. Required. 
:vartype source_database_name: str @@ -6149,7 +6785,7 @@ class MigrateMISyncCompleteCommandInput(_serialization.Model): "source_database_name": {"key": "sourceDatabaseName", "type": "str"}, } - def __init__(self, *, source_database_name: str, **kwargs): + def __init__(self, *, source_database_name: str, **kwargs: Any) -> None: """ :keyword source_database_name: Name of managed instance database. Required. :paramtype source_database_name: str @@ -6169,7 +6805,7 @@ class MigrateMISyncCompleteCommandOutput(_serialization.Model): "errors": {"key": "errors", "type": "[ReportableException]"}, } - def __init__(self, *, errors: Optional[List["_models.ReportableException"]] = None, **kwargs): + def __init__(self, *, errors: Optional[List["_models.ReportableException"]] = None, **kwargs: Any) -> None: """ :keyword errors: List of errors that happened during the command execution. :paramtype errors: list[~azure.mgmt.datamigration.models.ReportableException] @@ -6179,11 +6815,12 @@ def __init__(self, *, errors: Optional[List["_models.ReportableException"]] = No class MigrateMISyncCompleteCommandProperties(CommandProperties): - """Properties for the command that completes online migration for an Azure SQL Database Managed Instance. + """Properties for the command that completes online migration for an Azure SQL Database Managed + Instance. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database", "Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart". 
@@ -6214,7 +6851,7 @@ class MigrateMISyncCompleteCommandProperties(CommandProperties): "output": {"key": "output", "type": "MigrateMISyncCompleteCommandOutput"}, } - def __init__(self, *, input: Optional["_models.MigrateMISyncCompleteCommandInput"] = None, **kwargs): + def __init__(self, *, input: Optional["_models.MigrateMISyncCompleteCommandInput"] = None, **kwargs: Any) -> None: """ :keyword input: Command input. :paramtype input: ~azure.mgmt.datamigration.models.MigrateMISyncCompleteCommandInput @@ -6230,7 +6867,7 @@ class MigrateMongoDbTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -6289,8 +6926,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MongoDbMigrationSettings"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -6303,8 +6940,9 @@ def __init__( self.output = None -class MigrateMySqlAzureDbForMySqlOfflineDatabaseInput(_serialization.Model): - """Database specific information for offline MySQL to Azure Database for MySQL migration task inputs. +class MigrateMySqlAzureDbForMySqlOfflineDatabaseInput(_serialization.Model): # pylint: disable=name-too-long + """Database specific information for offline MySQL to Azure Database for MySQL migration task + inputs. :ivar name: Name of the database. 
:vartype name: str @@ -6327,8 +6965,8 @@ def __init__( name: Optional[str] = None, target_database_name: Optional[str] = None, table_map: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Name of the database. :paramtype name: str @@ -6344,10 +6982,11 @@ def __init__( self.table_map = table_map -class MigrateMySqlAzureDbForMySqlOfflineTaskInput(_serialization.Model): - """Input for the task that migrates MySQL databases to Azure Database for MySQL for offline migrations. +class MigrateMySqlAzureDbForMySqlOfflineTaskInput(_serialization.Model): # pylint: disable=name-too-long + """Input for the task that migrates MySQL databases to Azure Database for MySQL for offline + migrations. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Connection information for source MySQL. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -6394,8 +7033,8 @@ def __init__( started_on: Optional[datetime.datetime] = None, optional_agent_settings: Optional[Dict[str, str]] = None, encrypted_key_for_secure_fields: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Connection information for source MySQL. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -6425,8 +7064,9 @@ def __init__( self.encrypted_key_for_secure_fields = encrypted_key_for_secure_fields -class MigrateMySqlAzureDbForMySqlOfflineTaskOutput(_serialization.Model): - """Output for the task that migrates MySQL databases to Azure Database for MySQL for offline migrations. +class MigrateMySqlAzureDbForMySqlOfflineTaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for the task that migrates MySQL databases to Azure Database for MySQL for offline + migrations. 
You probably want to use the sub-classes and not this class directly. Known sub-classes are: MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel, @@ -6436,7 +7076,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -6463,7 +7103,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -6472,12 +7112,12 @@ def __init__(self, **kwargs): class MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel( MigrateMySqlAzureDbForMySqlOfflineTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -6558,7 +7198,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutputDatabaseLevel( "object_summary": {"key": "objectSummary", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -6579,12 +7219,14 @@ def __init__(self, **kwargs): self.object_summary = None -class MigrateMySqlAzureDbForMySqlOfflineTaskOutputError(MigrateMySqlAzureDbForMySqlOfflineTaskOutput): +class MigrateMySqlAzureDbForMySqlOfflineTaskOutputError( + MigrateMySqlAzureDbForMySqlOfflineTaskOutput +): # pylint: disable=name-too-long """MigrateMySqlAzureDbForMySqlOfflineTaskOutputError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -6606,7 +7248,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutputError(MigrateMySqlAzureDbForMy "error": {"key": "error", "type": "ReportableException"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "ErrorOutput" @@ -6615,12 +7257,12 @@ def __init__(self, **kwargs): class MigrateMySqlAzureDbForMySqlOfflineTaskOutputMigrationLevel( MigrateMySqlAzureDbForMySqlOfflineTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateMySqlAzureDbForMySqlOfflineTaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -6704,8 +7346,8 @@ def __init__( *, databases: Optional[str] = None, migration_report_result: Optional["_models.MigrationReportResult"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword databases: Selected databases as a map from database name to database id. :paramtype databases: str @@ -6734,12 +7376,12 @@ def __init__( class MigrateMySqlAzureDbForMySqlOfflineTaskOutputTableLevel( MigrateMySqlAzureDbForMySqlOfflineTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateMySqlAzureDbForMySqlOfflineTaskOutputTableLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -6799,7 +7441,7 @@ class MigrateMySqlAzureDbForMySqlOfflineTaskOutputTableLevel( "last_storage_update": {"key": "lastStorageUpdate", "type": "iso-8601"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "TableLevelOutput" @@ -6815,12 +7457,13 @@ def __init__(self, **kwargs): self.last_storage_update = None -class MigrateMySqlAzureDbForMySqlOfflineTaskProperties(ProjectTaskProperties): - """Properties for the task that migrates MySQL databases to Azure Database for MySQL for offline migrations. +class MigrateMySqlAzureDbForMySqlOfflineTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that migrates MySQL databases to Azure Database for MySQL for offline + migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. 
Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -6888,8 +7531,8 @@ def __init__( input: Optional["_models.MigrateMySqlAzureDbForMySqlOfflineTaskInput"] = None, is_cloneable: Optional[bool] = None, task_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -6908,7 +7551,7 @@ def __init__( self.task_id = task_id -class MigrateMySqlAzureDbForMySqlSyncDatabaseInput(_serialization.Model): +class MigrateMySqlAzureDbForMySqlSyncDatabaseInput(_serialization.Model): # pylint: disable=name-too-long """Database specific information for MySQL to Azure Database for MySQL migration task inputs. :ivar name: Name of the database. @@ -6944,8 +7587,8 @@ def __init__( source_setting: Optional[Dict[str, str]] = None, target_setting: Optional[Dict[str, str]] = None, table_map: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Name of the database. :paramtype name: str @@ -6971,9 +7614,10 @@ def __init__( class MigrateMySqlAzureDbForMySqlSyncTaskInput(_serialization.Model): - """Input for the task that migrates MySQL databases to Azure Database for MySQL for online migrations. + """Input for the task that migrates MySQL databases to Azure Database for MySQL for online + migrations. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Connection information for source MySQL. Required. 
:vartype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -7003,8 +7647,8 @@ def __init__( source_connection_info: "_models.MySqlConnectionInfo", target_connection_info: "_models.MySqlConnectionInfo", selected_databases: List["_models.MigrateMySqlAzureDbForMySqlSyncDatabaseInput"], - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Connection information for source MySQL. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.MySqlConnectionInfo @@ -7021,8 +7665,9 @@ def __init__( self.selected_databases = selected_databases -class MigrateMySqlAzureDbForMySqlSyncTaskOutput(_serialization.Model): - """Output for the task that migrates MySQL databases to Azure Database for MySQL for online migrations. +class MigrateMySqlAzureDbForMySqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for the task that migrates MySQL databases to Azure Database for MySQL for online + migrations. You probably want to use the sub-classes and not this class directly. Known sub-classes are: MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError, @@ -7033,7 +7678,7 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -7061,19 +7706,21 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None self.result_type: Optional[str] = None -class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError(MigrateMySqlAzureDbForMySqlSyncTaskOutput): +class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError( + MigrateMySqlAzureDbForMySqlSyncTaskOutput +): # pylint: disable=name-too-long """MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -7102,8 +7749,8 @@ def __init__( *, error_message: Optional[str] = None, events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword error_message: Error message. :paramtype error_message: str @@ -7118,12 +7765,12 @@ def __init__( class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevel( MigrateMySqlAzureDbForMySqlSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -7206,7 +7853,7 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevel( "latency": {"key": "latency", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -7227,12 +7874,14 @@ def __init__(self, **kwargs): self.latency = None -class MigrateMySqlAzureDbForMySqlSyncTaskOutputError(MigrateMySqlAzureDbForMySqlSyncTaskOutput): +class MigrateMySqlAzureDbForMySqlSyncTaskOutputError( + MigrateMySqlAzureDbForMySqlSyncTaskOutput +): # pylint: disable=name-too-long """MigrateMySqlAzureDbForMySqlSyncTaskOutputError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -7254,19 +7903,21 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutputError(MigrateMySqlAzureDbForMySql "error": {"key": "error", "type": "ReportableException"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "ErrorOutput" self.error = None -class MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel(MigrateMySqlAzureDbForMySqlSyncTaskOutput): +class MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel( + MigrateMySqlAzureDbForMySqlSyncTaskOutput +): # pylint: disable=name-too-long """MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -7308,7 +7959,7 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevel(MigrateMySqlAzureD "target_server": {"key": "targetServer", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "MigrationLevelOutput" @@ -7322,12 +7973,12 @@ def __init__(self, **kwargs): class MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevel( MigrateMySqlAzureDbForMySqlSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -7398,7 +8049,7 @@ class MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevel( "last_modified_time": {"key": "lastModifiedTime", "type": "iso-8601"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "TableLevelOutput" @@ -7417,12 +8068,13 @@ def __init__(self, **kwargs): self.last_modified_time = None -class MigrateMySqlAzureDbForMySqlSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that migrates MySQL databases to Azure Database for MySQL for online migrations. +class MigrateMySqlAzureDbForMySqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that migrates MySQL databases to Azure Database for MySQL for online + migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. 
:ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -7482,8 +8134,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MigrateMySqlAzureDbForMySqlSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -7496,12 +8148,13 @@ def __init__( self.output = None -class MigrateOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that migrates Oracle to Azure Database for PostgreSQL for online migrations. +class MigrateOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that migrates Oracle to Azure Database for PostgreSQL for online + migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -7561,8 +8214,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MigrateOracleAzureDbPostgreSqlSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -7575,8 +8228,9 @@ def __init__( self.output = None -class MigrateOracleAzureDbPostgreSqlSyncDatabaseInput(_serialization.Model): - """Database specific information for Oracle to Azure Database for PostgreSQL migration task inputs. 
+class MigrateOracleAzureDbPostgreSqlSyncDatabaseInput(_serialization.Model): # pylint: disable=name-too-long + """Database specific information for Oracle to Azure Database for PostgreSQL migration task + inputs. :ivar case_manipulation: How to handle object name casing: either Preserve or ToLower. :vartype case_manipulation: str @@ -7619,8 +8273,8 @@ def __init__( migration_setting: Optional[Dict[str, str]] = None, source_setting: Optional[Dict[str, str]] = None, target_setting: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword case_manipulation: How to handle object name casing: either Preserve or ToLower. :paramtype case_manipulation: str @@ -7651,10 +8305,11 @@ def __init__( self.target_setting = target_setting -class MigrateOracleAzureDbPostgreSqlSyncTaskInput(_serialization.Model): - """Input for the task that migrates Oracle databases to Azure Database for PostgreSQL for online migrations. +class MigrateOracleAzureDbPostgreSqlSyncTaskInput(_serialization.Model): # pylint: disable=name-too-long + """Input for the task that migrates Oracle databases to Azure Database for PostgreSQL for online + migrations. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar selected_databases: Databases to migrate. Required. :vartype selected_databases: @@ -7684,8 +8339,8 @@ def __init__( selected_databases: List["_models.MigrateOracleAzureDbPostgreSqlSyncDatabaseInput"], target_connection_info: "_models.PostgreSqlConnectionInfo", source_connection_info: "_models.OracleConnectionInfo", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword selected_databases: Databases to migrate. Required. 
:paramtype selected_databases: @@ -7702,8 +8357,9 @@ def __init__( self.source_connection_info = source_connection_info -class MigrateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): - """Output for the task that migrates Oracle databases to Azure Database for PostgreSQL for online migrations. +class MigrateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for the task that migrates Oracle databases to Azure Database for PostgreSQL for online + migrations. You probably want to use the sub-classes and not this class directly. Known sub-classes are: MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError, @@ -7714,7 +8370,7 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -7742,19 +8398,21 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None self.result_type: Optional[str] = None -class MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError(MigrateOracleAzureDbPostgreSqlSyncTaskOutput): +class MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError( + MigrateOracleAzureDbPostgreSqlSyncTaskOutput +): # pylint: disable=name-too-long """MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -7783,8 +8441,8 @@ def __init__( *, error_message: Optional[str] = None, events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword error_message: Error message. :paramtype error_message: str @@ -7799,12 +8457,12 @@ def __init__( class MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseLevel( MigrateOracleAzureDbPostgreSqlSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -7887,7 +8545,7 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutputDatabaseLevel( "latency": {"key": "latency", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -7908,12 +8566,14 @@ def __init__(self, **kwargs): self.latency = None -class MigrateOracleAzureDbPostgreSqlSyncTaskOutputError(MigrateOracleAzureDbPostgreSqlSyncTaskOutput): +class MigrateOracleAzureDbPostgreSqlSyncTaskOutputError( + MigrateOracleAzureDbPostgreSqlSyncTaskOutput +): # pylint: disable=name-too-long """MigrateOracleAzureDbPostgreSqlSyncTaskOutputError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -7935,19 +8595,21 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutputError(MigrateOracleAzureDbPost "error": {"key": "error", "type": "ReportableException"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "ErrorOutput" self.error = None -class MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel(MigrateOracleAzureDbPostgreSqlSyncTaskOutput): +class MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel( + MigrateOracleAzureDbPostgreSqlSyncTaskOutput +): # pylint: disable=name-too-long """MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -7989,7 +8651,7 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutputMigrationLevel(MigrateOracleAz "target_server": {"key": "targetServer", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "MigrationLevelOutput" @@ -8003,12 +8665,12 @@ def __init__(self, **kwargs): class MigrateOracleAzureDbPostgreSqlSyncTaskOutputTableLevel( MigrateOracleAzureDbPostgreSqlSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateOracleAzureDbPostgreSqlSyncTaskOutputTableLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -8079,7 +8741,7 @@ class MigrateOracleAzureDbPostgreSqlSyncTaskOutputTableLevel( "last_modified_time": {"key": "lastModifiedTime", "type": "iso-8601"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "TableLevelOutput" @@ -8098,8 +8760,9 @@ def __init__(self, **kwargs): self.last_modified_time = None -class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInput(_serialization.Model): - """Database specific information for PostgreSQL to Azure Database for PostgreSQL migration task inputs. +class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInput(_serialization.Model): # pylint: disable=name-too-long + """Database specific information for PostgreSQL to Azure Database for PostgreSQL migration task + inputs. Variables are only populated by the server, and will be ignored when sending a request. @@ -8147,8 +8810,8 @@ def __init__( source_setting: Optional[Dict[str, str]] = None, target_setting: Optional[Dict[str, str]] = None, selected_tables: Optional[List["_models.MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Name of the database. :paramtype name: str @@ -8175,7 +8838,9 @@ def __init__( self.selected_tables = selected_tables -class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput(_serialization.Model): +class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput( + _serialization.Model +): # pylint: disable=name-too-long """Selected tables for the migration. :ivar name: Name of the table to migrate. @@ -8186,7 +8851,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput(_serialization "name": {"key": "name", "type": "str"}, } - def __init__(self, *, name: Optional[str] = None, **kwargs): + def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword name: Name of the table to migrate. 
:paramtype name: str @@ -8195,12 +8860,13 @@ def __init__(self, *, name: Optional[str] = None, **kwargs): self.name = name -class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): - """Input for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations. +class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInput(_serialization.Model): # pylint: disable=name-too-long + """Input for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for + online migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar selected_databases: Databases to migrate. Required. :vartype selected_databases: @@ -8241,8 +8907,8 @@ def __init__( target_connection_info: "_models.PostgreSqlConnectionInfo", source_connection_info: "_models.PostgreSqlConnectionInfo", encrypted_key_for_secure_fields: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword selected_databases: Databases to migrate. Required. :paramtype selected_databases: @@ -8263,8 +8929,9 @@ def __init__( self.started_on = None -class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): - """Output for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations. +class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for + online migrations. You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseError, @@ -8275,7 +8942,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -8303,7 +8970,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -8312,12 +8979,12 @@ def __init__(self, **kwargs): class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseError( MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput -): +): # pylint: disable=name-too-long """MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -8346,8 +9013,8 @@ def __init__( *, error_message: Optional[str] = None, events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword error_message: Error message. :paramtype error_message: str @@ -8362,12 +9029,12 @@ def __init__( class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevel( MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -8450,7 +9117,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevel( "latency": {"key": "latency", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -8471,12 +9138,14 @@ def __init__(self, **kwargs): self.latency = None -class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError(MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput): +class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError( + MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput +): # pylint: disable=name-too-long """MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -8501,7 +9170,9 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputError(MigratePostgreSql "events": {"key": "events", "type": "[SyncMigrationDatabaseErrorEvent]"}, } - def __init__(self, *, events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None, **kwargs): + def __init__( + self, *, events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None, **kwargs: Any + ) -> None: """ :keyword events: List of error events. 
:paramtype events: list[~azure.mgmt.datamigration.models.SyncMigrationDatabaseErrorEvent] @@ -8514,12 +9185,12 @@ def __init__(self, *, events: Optional[List["_models.SyncMigrationDatabaseErrorE class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevel( MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -8579,7 +9250,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevel( "database_count": {"key": "databaseCount", "type": "float"}, } - def __init__(self, *, database_count: Optional[float] = None, **kwargs): + def __init__(self, *, database_count: Optional[float] = None, **kwargs: Any) -> None: """ :keyword database_count: Number of databases to include. :paramtype database_count: float @@ -8600,12 +9271,12 @@ def __init__(self, *, database_count: Optional[float] = None, **kwargs): class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevel( MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -8676,7 +9347,7 @@ class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevel( "last_modified_time": {"key": "lastModifiedTime", "type": "iso-8601"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "TableLevelOutput" @@ -8695,12 +9366,13 @@ def __init__(self, **kwargs): self.last_modified_time = None -class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations. +class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for + online migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -8773,8 +9445,8 @@ def __init__( task_id: Optional[str] = None, created_on: Optional[str] = None, is_cloneable: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -8824,8 +9496,8 @@ def __init__( id: Optional[str] = None, # pylint: disable=redefined-builtin target_database_name: Optional[str] = None, schema_setting: Optional["_models.SchemaMigrationSetting"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Name of source database. 
:paramtype name: str @@ -8846,7 +9518,7 @@ def __init__( class SqlMigrationTaskInput(_serialization.Model): """Base class for migration task input. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -8869,8 +9541,8 @@ def __init__( *, source_connection_info: "_models.SqlConnectionInfo", target_connection_info: "_models.SqlConnectionInfo", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to source. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -8885,7 +9557,7 @@ def __init__( class MigrateSchemaSqlServerSqlDbTaskInput(SqlMigrationTaskInput): """Input for task that migrates Schema for SQL Server databases to Azure SQL databases. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -8922,8 +9594,8 @@ def __init__( selected_databases: List["_models.MigrateSchemaSqlServerSqlDbDatabaseInput"], encrypted_key_for_secure_fields: Optional[str] = None, started_on: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to source. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -8954,7 +9626,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. 
+ All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -8981,7 +9653,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -8990,12 +9662,12 @@ def __init__(self, **kwargs): class MigrateSchemaSqlServerSqlDbTaskOutputDatabaseLevel( MigrateSchemaSqlServerSqlDbTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSchemaSqlServerSqlDbTaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -9057,7 +9729,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutputDatabaseLevel( "file_id": {"key": "fileId", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -9073,12 +9745,14 @@ def __init__(self, **kwargs): self.file_id = None -class MigrateSchemaSqlServerSqlDbTaskOutputError(MigrateSchemaSqlServerSqlDbTaskOutput): +class MigrateSchemaSqlServerSqlDbTaskOutputError( + MigrateSchemaSqlServerSqlDbTaskOutput +): # pylint: disable=name-too-long """MigrateSchemaSqlServerSqlDbTaskOutputError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -9104,7 +9778,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutputError(MigrateSchemaSqlServerSqlDbTask "error_text": {"key": "errorText", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "SchemaErrorOutput" @@ -9112,12 +9786,14 @@ def __init__(self, **kwargs): self.error_text = None -class MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel(MigrateSchemaSqlServerSqlDbTaskOutput): +class MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel( + MigrateSchemaSqlServerSqlDbTaskOutput +): # pylint: disable=name-too-long """MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -9164,7 +9840,7 @@ class MigrateSchemaSqlServerSqlDbTaskOutputMigrationLevel(MigrateSchemaSqlServer "target_server_brand_version": {"key": "targetServerBrandVersion", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "MigrationLevelOutput" @@ -9177,12 +9853,12 @@ def __init__(self, **kwargs): self.target_server_brand_version = None -class MigrateSchemaSqlServerSqlDbTaskProperties(ProjectTaskProperties): +class MigrateSchemaSqlServerSqlDbTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long """Properties for task that migrates Schema for SQL Server databases to Azure SQL databases. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -9253,8 +9929,8 @@ def __init__( created_on: Optional[str] = None, task_id: Optional[str] = None, is_cloneable: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -9281,7 +9957,7 @@ class MigrateSchemaSqlTaskOutputError(MigrateSchemaSqlServerSqlDbTaskOutput): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -9303,7 +9979,7 @@ class MigrateSchemaSqlTaskOutputError(MigrateSchemaSqlServerSqlDbTaskOutput): "error": {"key": "error", "type": "ReportableException"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "ErrorOutput" @@ -9337,8 +10013,8 @@ def __init__( restore_database_name: Optional[str] = None, backup_and_restore_folder: Optional[str] = None, database_files: Optional[List["_models.DatabaseFileInput"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Name of the database. :paramtype name: str @@ -9392,8 +10068,8 @@ def __init__( table_map: Optional[Dict[str, str]] = None, schema_setting: Optional[JSON] = None, id: Optional[str] = None, # pylint: disable=redefined-builtin - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Name of the database. :paramtype name: str @@ -9461,8 +10137,8 @@ def __init__( migration_setting: Optional[Dict[str, str]] = None, source_setting: Optional[Dict[str, str]] = None, target_setting: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword id: Unique identifier for database. 
:paramtype id: str @@ -9493,9 +10169,10 @@ def __init__( class MigrateSqlServerSqlDbSyncTaskInput(SqlMigrationTaskInput): - """Input for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations. + """Input for the task that migrates on-prem SQL Server databases to Azure SQL Database for online + migrations. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -9528,8 +10205,8 @@ def __init__( target_connection_info: "_models.SqlConnectionInfo", selected_databases: List["_models.MigrateSqlServerSqlDbSyncDatabaseInput"], validation_options: Optional["_models.MigrationValidationOptions"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to source. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -9549,7 +10226,8 @@ def __init__( class MigrateSqlServerSqlDbSyncTaskOutput(_serialization.Model): - """Output for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations. + """Output for the task that migrates on-prem SQL Server databases to Azure SQL Database for online + migrations. You probably want to use the sub-classes and not this class directly. Known sub-classes are: MigrateSqlServerSqlDbSyncTaskOutputDatabaseError, @@ -9559,7 +10237,7 @@ class MigrateSqlServerSqlDbSyncTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -9587,19 +10265,21 @@ class MigrateSqlServerSqlDbSyncTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None self.result_type: Optional[str] = None -class MigrateSqlServerSqlDbSyncTaskOutputDatabaseError(MigrateSqlServerSqlDbSyncTaskOutput): +class MigrateSqlServerSqlDbSyncTaskOutputDatabaseError( + MigrateSqlServerSqlDbSyncTaskOutput +): # pylint: disable=name-too-long """MigrateSqlServerSqlDbSyncTaskOutputDatabaseError. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -9628,8 +10308,8 @@ def __init__( *, error_message: Optional[str] = None, events: Optional[List["_models.SyncMigrationDatabaseErrorEvent"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword error_message: Error message. :paramtype error_message: str @@ -9644,12 +10324,12 @@ def __init__( class MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevel( MigrateSqlServerSqlDbSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -9732,7 +10412,7 @@ class MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevel( "latency": {"key": "latency", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -9758,7 +10438,7 @@ class MigrateSqlServerSqlDbSyncTaskOutputError(MigrateSqlServerSqlDbSyncTaskOutp Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -9780,19 +10460,21 @@ class MigrateSqlServerSqlDbSyncTaskOutputError(MigrateSqlServerSqlDbSyncTaskOutp "error": {"key": "error", "type": "ReportableException"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "ErrorOutput" self.error = None -class MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel(MigrateSqlServerSqlDbSyncTaskOutput): +class MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel( + MigrateSqlServerSqlDbSyncTaskOutput +): # pylint: disable=name-too-long """MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -9838,7 +10520,7 @@ class MigrateSqlServerSqlDbSyncTaskOutputMigrationLevel(MigrateSqlServerSqlDbSyn "database_count": {"key": "databaseCount", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "MigrationLevelOutput" @@ -9853,12 +10535,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlDbSyncTaskOutputTableLevel( MigrateSqlServerSqlDbSyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlDbSyncTaskOutputTableLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -9929,7 +10611,7 @@ class MigrateSqlServerSqlDbSyncTaskOutputTableLevel( "last_modified_time": {"key": "lastModifiedTime", "type": "iso-8601"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "TableLevelOutput" @@ -9949,11 +10631,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlDbSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations. + """Properties for the task that migrates on-prem SQL Server databases to Azure SQL Database for + online migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -10012,8 +10695,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MigrateSqlServerSqlDbSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -10029,7 +10712,7 @@ def __init__( class MigrateSqlServerSqlDbTaskInput(SqlMigrationTaskInput): """Input for the task that migrates on-prem SQL Server databases to Azure SQL Database. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -10078,8 +10761,8 @@ def __init__( validation_options: Optional["_models.MigrationValidationOptions"] = None, started_on: Optional[str] = None, encrypted_key_for_secure_fields: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to source. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -10123,7 +10806,7 @@ class MigrateSqlServerSqlDbTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -10152,7 +10835,7 @@ class MigrateSqlServerSqlDbTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -10161,12 +10844,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlDbTaskOutputDatabaseLevel( MigrateSqlServerSqlDbTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlDbTaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -10243,7 +10926,7 @@ class MigrateSqlServerSqlDbTaskOutputDatabaseLevel( "object_summary": {"key": "objectSummary", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -10330,7 +11013,7 @@ class MigrationValidationDatabaseLevelResult(_serialization.Model): "status": {"key": "status", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -10347,12 +11030,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResult( MigrateSqlServerSqlDbTaskOutput, MigrationValidationDatabaseLevelResult -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResult. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. 
:ivar migration_id: Migration Identifier. :vartype migration_id: str @@ -10420,7 +11103,7 @@ class MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResult( "result_type": {"key": "resultType", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.migration_id = None @@ -10441,7 +11124,7 @@ class MigrateSqlServerSqlDbTaskOutputError(MigrateSqlServerSqlDbTaskOutput): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -10463,7 +11146,7 @@ class MigrateSqlServerSqlDbTaskOutputError(MigrateSqlServerSqlDbTaskOutput): "error": {"key": "error", "type": "ReportableException"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "ErrorOutput" @@ -10472,12 +11155,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlDbTaskOutputMigrationLevel( MigrateSqlServerSqlDbTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlDbTaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -10562,8 +11245,8 @@ def __init__( *, migration_validation_result: Optional["_models.MigrationValidationResult"] = None, migration_report_result: Optional["_models.MigrationReportResult"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword migration_validation_result: Migration Validation Results. 
:paramtype migration_validation_result: @@ -10593,12 +11276,12 @@ def __init__( class MigrateSqlServerSqlDbTaskOutputTableLevel( MigrateSqlServerSqlDbTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlDbTaskOutputTableLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -10654,7 +11337,7 @@ class MigrateSqlServerSqlDbTaskOutputTableLevel( "result_prefix": {"key": "resultPrefix", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "TableLevelOutput" @@ -10704,8 +11387,8 @@ def __init__( self, *, summary_results: Optional[Dict[str, "_models.MigrationValidationDatabaseSummaryResult"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword summary_results: Validation summary results for each database. :paramtype summary_results: dict[str, @@ -10718,12 +11401,14 @@ def __init__( self.status = None -class MigrateSqlServerSqlDbTaskOutputValidationResult(MigrateSqlServerSqlDbTaskOutput, MigrationValidationResult): +class MigrateSqlServerSqlDbTaskOutputValidationResult( + MigrateSqlServerSqlDbTaskOutput, MigrationValidationResult +): # pylint: disable=name-too-long """MigrateSqlServerSqlDbTaskOutputValidationResult. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar migration_id: Migration Identifier. 
:vartype migration_id: str @@ -10759,8 +11444,8 @@ def __init__( self, *, summary_results: Optional[Dict[str, "_models.MigrationValidationDatabaseSummaryResult"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword summary_results: Validation summary results for each database. :paramtype summary_results: dict[str, @@ -10779,7 +11464,7 @@ class MigrateSqlServerSqlDbTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -10850,8 +11535,8 @@ def __init__( task_id: Optional[str] = None, is_cloneable: Optional[bool] = None, created_on: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -10876,7 +11561,7 @@ def __init__( class MigrateSqlServerSqlMIDatabaseInput(_serialization.Model): """Database specific information for SQL to Azure SQL DB Managed Instance migration task inputs. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar name: Name of the database. Required. :vartype name: str @@ -10911,8 +11596,8 @@ def __init__( backup_file_share: Optional["_models.FileShare"] = None, backup_file_paths: Optional[List[str]] = None, id: Optional[str] = None, # pylint: disable=redefined-builtin - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Name of the database. Required. 
:paramtype name: str @@ -10934,9 +11619,10 @@ def __init__( class SqlServerSqlMISyncTaskInput(_serialization.Model): - """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario. + """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online + scenario. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar selected_databases: Databases to migrate. Required. :vartype selected_databases: @@ -10950,8 +11636,9 @@ class SqlServerSqlMISyncTaskInput(_serialization.Model): :ivar target_connection_info: Connection information for Azure SQL Database Managed Instance. Required. :vartype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo - :ivar azure_app: Azure Active Directory Application the DMS instance will use to connect to the - target instance of Azure SQL Database Managed Instance and the Azure Storage Account. Required. + :ivar azure_app: Azure Active Directory Application the DMS (classic) instance will use to + connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage + Account. Required. :vartype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp """ @@ -10981,8 +11668,8 @@ def __init__( target_connection_info: "_models.MiSqlConnectionInfo", azure_app: "_models.AzureActiveDirectoryApp", backup_file_share: Optional["_models.FileShare"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword selected_databases: Databases to migrate. Required. :paramtype selected_databases: @@ -10996,9 +11683,9 @@ def __init__( :keyword target_connection_info: Connection information for Azure SQL Database Managed Instance. Required. 
:paramtype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo - :keyword azure_app: Azure Active Directory Application the DMS instance will use to connect to - the target instance of Azure SQL Database Managed Instance and the Azure Storage Account. - Required. + :keyword azure_app: Azure Active Directory Application the DMS (classic) instance will use to + connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage + Account. Required. :paramtype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp """ super().__init__(**kwargs) @@ -11011,9 +11698,10 @@ def __init__( class MigrateSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskInput): - """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario. + """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online + scenario. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar selected_databases: Databases to migrate. Required. :vartype selected_databases: @@ -11027,8 +11715,9 @@ class MigrateSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskInput): :ivar target_connection_info: Connection information for Azure SQL Database Managed Instance. Required. :vartype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo - :ivar azure_app: Azure Active Directory Application the DMS instance will use to connect to the - target instance of Azure SQL Database Managed Instance and the Azure Storage Account. Required. + :ivar azure_app: Azure Active Directory Application the DMS (classic) instance will use to + connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage + Account. Required. 
:vartype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp :ivar number_of_parallel_database_migrations: Number of database migrations to start in parallel. @@ -11063,8 +11752,8 @@ def __init__( azure_app: "_models.AzureActiveDirectoryApp", backup_file_share: Optional["_models.FileShare"] = None, number_of_parallel_database_migrations: Optional[float] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword selected_databases: Databases to migrate. Required. :paramtype selected_databases: @@ -11078,9 +11767,9 @@ def __init__( :keyword target_connection_info: Connection information for Azure SQL Database Managed Instance. Required. :paramtype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo - :keyword azure_app: Azure Active Directory Application the DMS instance will use to connect to - the target instance of Azure SQL Database Managed Instance and the Azure Storage Account. - Required. + :keyword azure_app: Azure Active Directory Application the DMS (classic) instance will use to + connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage + Account. Required. :paramtype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp :keyword number_of_parallel_database_migrations: Number of database migrations to start in parallel. @@ -11099,7 +11788,8 @@ def __init__( class MigrateSqlServerSqlMISyncTaskOutput(_serialization.Model): - """Output for task that migrates SQL Server databases to Azure SQL Database Managed Instance using Log Replay Service. + """Output for task that migrates SQL Server databases to Azure SQL Database Managed Instance using + Log Replay Service. You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel, MigrateSqlServerSqlMISyncTaskOutputError, @@ -11107,7 +11797,7 @@ class MigrateSqlServerSqlMISyncTaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -11133,7 +11823,7 @@ class MigrateSqlServerSqlMISyncTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -11142,12 +11832,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel( MigrateSqlServerSqlMISyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -11214,7 +11904,7 @@ class MigrateSqlServerSqlMISyncTaskOutputDatabaseLevel( "exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -11236,7 +11926,7 @@ class MigrateSqlServerSqlMISyncTaskOutputError(MigrateSqlServerSqlMISyncTaskOutp Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -11258,7 +11948,7 @@ class MigrateSqlServerSqlMISyncTaskOutputError(MigrateSqlServerSqlMISyncTaskOutp "error": {"key": "error", "type": "ReportableException"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "ErrorOutput" @@ -11267,12 +11957,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlMISyncTaskOutputMigrationLevel( MigrateSqlServerSqlMISyncTaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlMISyncTaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -11335,7 +12025,7 @@ class MigrateSqlServerSqlMISyncTaskOutputMigrationLevel( "database_error_count": {"key": "databaseErrorCount", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "MigrationLevelOutput" @@ -11353,11 +12043,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlMISyncTaskProperties(ProjectTaskProperties): - """Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance sync scenario. + """Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance + sync scenario. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -11420,8 +12111,8 @@ def __init__( client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MigrateSqlServerSqlMISyncTaskInput"] = None, created_on: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -11440,7 +12131,7 @@ def __init__( class MigrateSqlServerSqlMITaskInput(SqlMigrationTaskInput): # pylint: disable=too-many-instance-attributes """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -11507,8 +12198,8 @@ def __init__( backup_mode: Optional[Union[str, "_models.BackupMode"]] = None, aad_domain_name: Optional[str] = None, encrypted_key_for_secure_fields: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to source. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -11563,7 +12254,7 @@ class MigrateSqlServerSqlMITaskOutput(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -11591,19 +12282,19 @@ class MigrateSqlServerSqlMITaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None self.result_type: Optional[str] = None -class MigrateSqlServerSqlMITaskOutputAgentJobLevel(MigrateSqlServerSqlMITaskOutput): +class MigrateSqlServerSqlMITaskOutputAgentJobLevel(MigrateSqlServerSqlMITaskOutput): # pylint: disable=name-too-long """MigrateSqlServerSqlMITaskOutputAgentJobLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -11650,7 +12341,7 @@ class MigrateSqlServerSqlMITaskOutputAgentJobLevel(MigrateSqlServerSqlMITaskOutp "exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "AgentJobLevelOutput" @@ -11663,12 +12354,12 @@ def __init__(self, **kwargs): self.exceptions_and_warnings = None -class MigrateSqlServerSqlMITaskOutputDatabaseLevel(MigrateSqlServerSqlMITaskOutput): +class MigrateSqlServerSqlMITaskOutputDatabaseLevel(MigrateSqlServerSqlMITaskOutput): # pylint: disable=name-too-long """MigrateSqlServerSqlMITaskOutputDatabaseLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -11720,7 +12411,7 @@ class MigrateSqlServerSqlMITaskOutputDatabaseLevel(MigrateSqlServerSqlMITaskOutp "exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "DatabaseLevelOutput" @@ -11739,7 +12430,7 @@ class MigrateSqlServerSqlMITaskOutputError(MigrateSqlServerSqlMITaskOutput): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -11761,19 +12452,19 @@ class MigrateSqlServerSqlMITaskOutputError(MigrateSqlServerSqlMITaskOutput): "error": {"key": "error", "type": "ReportableException"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "ErrorOutput" self.error = None -class MigrateSqlServerSqlMITaskOutputLoginLevel(MigrateSqlServerSqlMITaskOutput): +class MigrateSqlServerSqlMITaskOutputLoginLevel(MigrateSqlServerSqlMITaskOutput): # pylint: disable=name-too-long """MigrateSqlServerSqlMITaskOutputLoginLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -11822,7 +12513,7 @@ class MigrateSqlServerSqlMITaskOutputLoginLevel(MigrateSqlServerSqlMITaskOutput) "exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "LoginLevelOutput" @@ -11837,12 +12528,12 @@ def __init__(self, **kwargs): class MigrateSqlServerSqlMITaskOutputMigrationLevel( MigrateSqlServerSqlMITaskOutput -): # pylint: disable=too-many-instance-attributes +): # pylint: disable=too-many-instance-attributes,name-too-long """MigrateSqlServerSqlMITaskOutputMigrationLevel. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -11923,7 +12614,7 @@ class MigrateSqlServerSqlMITaskOutputMigrationLevel( "exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "MigrationLevelOutput" @@ -11949,7 +12640,7 @@ class MigrateSqlServerSqlMITaskProperties(ProjectTaskProperties): # pylint: dis Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -12024,8 +12715,8 @@ def __init__( created_on: Optional[str] = None, parent_task_id: Optional[str] = None, is_cloneable: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -12051,9 +12742,10 @@ def __init__( class MigrateSsisTaskInput(SqlMigrationTaskInput): - """Input for task that migrates SSIS packages from SQL Server to Azure SQL Database Managed Instance. + """Input for task that migrates SSIS packages from SQL Server to Azure SQL Database Managed + Instance. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -12081,8 +12773,8 @@ def __init__( source_connection_info: "_models.SqlConnectionInfo", target_connection_info: "_models.SqlConnectionInfo", ssis_migration_info: "_models.SsisMigrationInfo", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to source. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -12098,14 +12790,15 @@ def __init__( class MigrateSsisTaskOutput(_serialization.Model): - """Output for task that migrates SSIS packages from SQL Server to Azure SQL Database Managed Instance. + """Output for task that migrates SSIS packages from SQL Server to Azure SQL Database Managed + Instance. You probably want to use the sub-classes and not this class directly. Known sub-classes are: MigrateSsisTaskOutputMigrationLevel, MigrateSsisTaskOutputProjectLevel Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -12130,7 +12823,7 @@ class MigrateSsisTaskOutput(_serialization.Model): } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -12142,7 +12835,7 @@ class MigrateSsisTaskOutputMigrationLevel(MigrateSsisTaskOutput): # pylint: dis Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. :vartype id: str @@ -12203,7 +12896,7 @@ class MigrateSsisTaskOutputMigrationLevel(MigrateSsisTaskOutput): # pylint: dis "stage": {"key": "stage", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "MigrationLevelOutput" @@ -12224,7 +12917,7 @@ class MigrateSsisTaskOutputProjectLevel(MigrateSsisTaskOutput): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar id: Result identifier. 
:vartype id: str @@ -12276,7 +12969,7 @@ class MigrateSsisTaskOutputProjectLevel(MigrateSsisTaskOutput): "exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_type: str = "SsisProjectLevelOutput" @@ -12291,11 +12984,12 @@ def __init__(self, **kwargs): class MigrateSsisTaskProperties(ProjectTaskProperties): - """Properties for task that migrates SSIS packages from SQL Server databases to Azure SQL Database Managed Instance. + """Properties for task that migrates SSIS packages from SQL Server databases to Azure SQL Database + Managed Instance. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -12354,8 +13048,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MigrateSsisTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -12371,7 +13065,7 @@ def __init__( class MigrateSyncCompleteCommandInput(_serialization.Model): """Input for command that completes sync migration for a database. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar database_name: Name of database. Required. 
:vartype database_name: str @@ -12388,7 +13082,9 @@ class MigrateSyncCompleteCommandInput(_serialization.Model): "commit_time_stamp": {"key": "commitTimeStamp", "type": "iso-8601"}, } - def __init__(self, *, database_name: str, commit_time_stamp: Optional[datetime.datetime] = None, **kwargs): + def __init__( + self, *, database_name: str, commit_time_stamp: Optional[datetime.datetime] = None, **kwargs: Any + ) -> None: """ :keyword database_name: Name of database. Required. :paramtype database_name: str @@ -12421,7 +13117,7 @@ class MigrateSyncCompleteCommandOutput(_serialization.Model): "errors": {"key": "errors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -12433,7 +13129,7 @@ class MigrateSyncCompleteCommandProperties(CommandProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database", "Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart". @@ -12472,8 +13168,8 @@ def __init__( *, input: Optional["_models.MigrateSyncCompleteCommandInput"] = None, command_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword input: Command input. :paramtype input: ~azure.mgmt.datamigration.models.MigrateSyncCompleteCommandInput @@ -12487,84 +13183,248 @@ def __init__( self.command_id = command_id -class MigrationEligibilityInfo(_serialization.Model): - """Information about migration eligibility of a server object. +class MigrationEligibilityInfo(_serialization.Model): + """Information about migration eligibility of a server object. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar is_eligible_for_migration: Whether object is eligible for migration or not. + :vartype is_eligible_for_migration: bool + :ivar validation_messages: Information about eligibility failure for the server object. + :vartype validation_messages: list[str] + """ + + _validation = { + "is_eligible_for_migration": {"readonly": True}, + "validation_messages": {"readonly": True}, + } + + _attribute_map = { + "is_eligible_for_migration": {"key": "isEligibleForMigration", "type": "bool"}, + "validation_messages": {"key": "validationMessages", "type": "[str]"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.is_eligible_for_migration = None + self.validation_messages = None + + +class MigrationOperationInput(_serialization.Model): + """Migration Operation Input. + + :ivar migration_operation_id: ID tracking migration operation. + :vartype migration_operation_id: str + """ + + _attribute_map = { + "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, + } + + def __init__(self, *, migration_operation_id: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword migration_operation_id: ID tracking migration operation. + :paramtype migration_operation_id: str + """ + super().__init__(**kwargs) + self.migration_operation_id = migration_operation_id + + +class MigrationReportResult(_serialization.Model): + """Migration validation report result, contains the url for downloading the generated report. + + :ivar id: Migration validation result identifier. + :vartype id: str + :ivar report_url: The url of the report. + :vartype report_url: str + """ + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "report_url": {"key": "reportUrl", "type": "str"}, + } + + def __init__( + self, + *, + id: Optional[str] = None, # pylint: disable=redefined-builtin + report_url: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword id: Migration validation result identifier. 
+ :paramtype id: str + :keyword report_url: The url of the report. + :paramtype report_url: str + """ + super().__init__(**kwargs) + self.id = id + self.report_url = report_url + + +class TrackedResource(Resource): + """The resource model definition for an Azure Resource Manager tracked top level resource which + has 'tags' and a 'location'. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to server. + + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + } + + def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: + """ + :keyword tags: Resource tags. 
+ :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + """ + super().__init__(**kwargs) + self.tags = tags + self.location = location + + +class MigrationService(TrackedResource): + """A Migration Service. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to server. + + :ivar id: Fully qualified resource ID for the resource. E.g. + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar provisioning_state: Provisioning state to track the async operation status. Known values + are: "Provisioning", "Updating", "Succeeded", "Failed", and "Canceled". + :vartype provisioning_state: str or ~azure.mgmt.datamigration.models.ProvisioningState + :ivar integration_runtime_state: Current state of the Integration runtime. 
+ :vartype integration_runtime_state: str + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "provisioning_state": {"readonly": True}, + "integration_runtime_state": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "integration_runtime_state": {"key": "properties.integrationRuntimeState", "type": "str"}, + } + + def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + """ + super().__init__(tags=tags, location=location, **kwargs) + self.provisioning_state = None + self.integration_runtime_state = None + + +class MigrationServiceListResult(_serialization.Model): + """A list of Migration Service. Variables are only populated by the server, and will be ignored when sending a request. - :ivar is_eligible_for_migration: Whether object is eligible for migration or not. - :vartype is_eligible_for_migration: bool - :ivar validation_messages: Information about eligibility failure for the server object. 
- :vartype validation_messages: list[str] + :ivar value: + :vartype value: list[~azure.mgmt.datamigration.models.MigrationService] + :ivar next_link: + :vartype next_link: str """ _validation = { - "is_eligible_for_migration": {"readonly": True}, - "validation_messages": {"readonly": True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - "is_eligible_for_migration": {"key": "isEligibleForMigration", "type": "bool"}, - "validation_messages": {"key": "validationMessages", "type": "[str]"}, + "value": {"key": "value", "type": "[MigrationService]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.is_eligible_for_migration = None - self.validation_messages = None - - -class MigrationOperationInput(_serialization.Model): - """Migration Operation Input. - - :ivar migration_operation_id: ID tracking migration operation. - :vartype migration_operation_id: str - """ - - _attribute_map = { - "migration_operation_id": {"key": "migrationOperationId", "type": "str"}, - } - - def __init__(self, *, migration_operation_id: Optional[str] = None, **kwargs): - """ - :keyword migration_operation_id: ID tracking migration operation. - :paramtype migration_operation_id: str - """ - super().__init__(**kwargs) - self.migration_operation_id = migration_operation_id + self.value = None + self.next_link = None -class MigrationReportResult(_serialization.Model): - """Migration validation report result, contains the url for downloading the generated report. +class MigrationServiceUpdate(_serialization.Model): + """An update to a Migration Service. - :ivar id: Migration validation result identifier. - :vartype id: str - :ivar report_url: The url of the report. - :vartype report_url: str + :ivar tags: Dictionary of :code:``. 
+ :vartype tags: dict[str, str] """ _attribute_map = { - "id": {"key": "id", "type": "str"}, - "report_url": {"key": "reportUrl", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - *, - id: Optional[str] = None, # pylint: disable=redefined-builtin - report_url: Optional[str] = None, - **kwargs - ): + def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ - :keyword id: Migration validation result identifier. - :paramtype id: str - :keyword report_url: The url of the report. - :paramtype report_url: str + :keyword tags: Dictionary of :code:``. + :paramtype tags: dict[str, str] """ super().__init__(**kwargs) - self.id = id - self.report_url = report_url + self.tags = tags class MigrationStatusDetails(_serialization.Model): # pylint: disable=too-many-instance-attributes @@ -12633,7 +13493,7 @@ class MigrationStatusDetails(_serialization.Model): # pylint: disable=too-many- "pending_log_backups_count": {"key": "pendingLogBackupsCount", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.migration_state = None @@ -12672,7 +13532,7 @@ class MigrationTableMetadata(_serialization.Model): "target_table_name": {"key": "targetTableName", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.source_table_name = None @@ -12722,7 +13582,7 @@ class MigrationValidationDatabaseSummaryResult(_serialization.Model): "status": {"key": "status", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -12762,8 +13622,8 @@ def __init__( enable_schema_validation: Optional[bool] = None, enable_data_integrity_validation: Optional[bool] = None, enable_query_analysis_validation: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword 
enable_schema_validation: Allows to compare the schema information between source and target. @@ -12786,7 +13646,7 @@ def __init__( class MiSqlConnectionInfo(ConnectionInfo): """Properties required to create a connection to Azure SQL database Managed instance. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Type of connection info. Required. :vartype type: str @@ -12817,8 +13677,8 @@ def __init__( managed_instance_resource_id: str, user_name: Optional[str] = None, password: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword user_name: User name. :paramtype user_name: str @@ -12833,12 +13693,72 @@ def __init__( self.managed_instance_resource_id = managed_instance_resource_id +class MongoConnectionInformation(_serialization.Model): + """Mongo Connection. + + :ivar host: Host of mongo connection. + :vartype host: str + :ivar port: Port of mongo connection. + :vartype port: int + :ivar user_name: User name to connect to Mongo. + :vartype user_name: str + :ivar password: Password to connect to Mongo. + :vartype password: str + :ivar use_ssl: Whether to UseSsl or UseTls to connect to Mongo. Default is true. + :vartype use_ssl: bool + :ivar connection_string: ConnectionString to connect to Mongo. 
+ :vartype connection_string: str + """ + + _attribute_map = { + "host": {"key": "host", "type": "str"}, + "port": {"key": "port", "type": "int"}, + "user_name": {"key": "userName", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "use_ssl": {"key": "useSsl", "type": "bool"}, + "connection_string": {"key": "connectionString", "type": "str"}, + } + + def __init__( + self, + *, + host: Optional[str] = None, + port: Optional[int] = None, + user_name: Optional[str] = None, + password: Optional[str] = None, + use_ssl: Optional[bool] = None, + connection_string: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword host: Host of mongo connection. + :paramtype host: str + :keyword port: Port of mongo connection. + :paramtype port: int + :keyword user_name: User name to connect to Mongo. + :paramtype user_name: str + :keyword password: Password to connect to Mongo. + :paramtype password: str + :keyword use_ssl: Whether to UseSsl or UseTls to connect to Mongo. Default is true. + :paramtype use_ssl: bool + :keyword connection_string: ConnectionString to connect to Mongo. + :paramtype connection_string: str + """ + super().__init__(**kwargs) + self.host = host + self.port = port + self.user_name = user_name + self.password = password + self.use_ssl = use_ssl + self.connection_string = connection_string + + class MongoDbCancelCommand(CommandProperties): """Properties for the command that cancels a migration in whole or in part. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database", "Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart". 
@@ -12865,7 +13785,7 @@ class MongoDbCancelCommand(CommandProperties): "input": {"key": "input", "type": "MongoDbCommandInput"}, } - def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **kwargs): + def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **kwargs: Any) -> None: """ :keyword input: Command input. :paramtype input: ~azure.mgmt.datamigration.models.MongoDbCommandInput @@ -12878,7 +13798,7 @@ def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **k class MongoDbClusterInfo(_serialization.Model): """Describes a MongoDB data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar databases: A list of non-system databases in the cluster. Required. :vartype databases: list[~azure.mgmt.datamigration.models.MongoDbDatabaseInfo] @@ -12913,8 +13833,8 @@ def __init__( supports_sharding: bool, type: Union[str, "_models.MongoDbClusterType"], version: str, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword databases: A list of non-system databases in the cluster. Required. :paramtype databases: list[~azure.mgmt.datamigration.models.MongoDbDatabaseInfo] @@ -12937,7 +13857,7 @@ def __init__( class MongoDbObjectInfo(_serialization.Model): """Describes a database or collection within a MongoDB data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar average_document_size: The average document size, or -1 if the average size is unknown. Required. @@ -12979,8 +13899,8 @@ def __init__( document_count: int, name: str, qualified_name: str, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword average_document_size: The average document size, or -1 if the average size is unknown. Required. 
@@ -13008,7 +13928,7 @@ def __init__( class MongoDbCollectionInfo(MongoDbObjectInfo): # pylint: disable=too-many-instance-attributes """Describes a supported collection within a MongoDB database. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar average_document_size: The average document size, or -1 if the average size is unknown. Required. @@ -13085,8 +14005,8 @@ def __init__( supports_sharding: bool, shard_key: Optional["_models.MongoDbShardKeyInfo"] = None, view_of: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword average_document_size: The average document size, or -1 if the average size is unknown. Required. @@ -13142,7 +14062,7 @@ class MongoDbProgress(_serialization.Model): # pylint: disable=too-many-instanc You probably want to use the sub-classes and not this class directly. Known sub-classes are: MongoDbCollectionProgress, MongoDbDatabaseProgress, MongoDbMigrationProgress - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar bytes_copied: The number of document bytes copied during the Copying stage. Required. :vartype bytes_copied: int @@ -13241,8 +14161,8 @@ def __init__( last_replay_time: Optional[datetime.datetime] = None, name: Optional[str] = None, qualified_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword bytes_copied: The number of document bytes copied during the Copying stage. Required. :paramtype bytes_copied: int @@ -13303,7 +14223,7 @@ def __init__( class MongoDbCollectionProgress(MongoDbProgress): # pylint: disable=too-many-instance-attributes """Describes the progress of a collection. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. 
:ivar bytes_copied: The number of document bytes copied during the Copying stage. Required. :vartype bytes_copied: int @@ -13394,8 +14314,8 @@ def __init__( last_replay_time: Optional[datetime.datetime] = None, name: Optional[str] = None, qualified_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword bytes_copied: The number of document bytes copied during the Copying stage. Required. :paramtype bytes_copied: int @@ -13480,8 +14400,8 @@ def __init__( can_delete: Optional[bool] = None, shard_key: Optional["_models.MongoDbShardKeySetting"] = None, target_r_us: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword can_delete: Whether the migrator is allowed to drop the target collection in the course of performing a migration. The default is true. @@ -13510,7 +14430,7 @@ class MongoDbCommandInput(_serialization.Model): "object_name": {"key": "objectName", "type": "str"}, } - def __init__(self, *, object_name: Optional[str] = None, **kwargs): + def __init__(self, *, object_name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword object_name: The qualified name of a database or collection to act upon, or null to act upon the entire migration. @@ -13523,7 +14443,7 @@ def __init__(self, *, object_name: Optional[str] = None, **kwargs): class MongoDbConnectionInfo(ConnectionInfo): # pylint: disable=too-many-instance-attributes """Describes a connection to a MongoDB data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Type of connection info. Required. :vartype type: str @@ -13596,8 +14516,8 @@ def __init__( port: Optional[int] = None, additional_settings: Optional[str] = None, authentication: Optional[Union[str, "_models.AuthenticationType"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword user_name: User name. 
:paramtype user_name: str @@ -13647,7 +14567,7 @@ def __init__( class MongoDbDatabaseInfo(MongoDbObjectInfo): """Describes a database within a MongoDB data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar average_document_size: The average document size, or -1 if the average size is unknown. Required. @@ -13700,8 +14620,8 @@ def __init__( qualified_name: str, collections: List["_models.MongoDbCollectionInfo"], supports_sharding: bool, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword average_document_size: The average document size, or -1 if the average size is unknown. Required. @@ -13738,7 +14658,7 @@ def __init__( class MongoDbDatabaseProgress(MongoDbProgress): # pylint: disable=too-many-instance-attributes """Describes the progress of a database. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar bytes_copied: The number of document bytes copied during the Copying stage. Required. :vartype bytes_copied: int @@ -13834,8 +14754,8 @@ def __init__( name: Optional[str] = None, qualified_name: Optional[str] = None, collections: Optional[Dict[str, "_models.MongoDbCollectionProgress"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword bytes_copied: The number of document bytes copied during the Copying stage. Required. :paramtype bytes_copied: int @@ -13902,7 +14822,7 @@ def __init__( class MongoDbDatabaseSettings(_serialization.Model): """Describes how an individual MongoDB database should be migrated. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar collections: The collections on the source database to migrate to the target. The keys are the unqualified names of the collections. Required. 
@@ -13927,8 +14847,8 @@ def __init__( *, collections: Dict[str, "_models.MongoDbCollectionSettings"], target_r_us: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword collections: The collections on the source database to migrate to the target. The keys are the unqualified names of the collections. Required. @@ -13971,8 +14891,8 @@ def __init__( count: Optional[int] = None, message: Optional[str] = None, type: Optional[Union[str, "_models.MongoDbErrorType"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword code: The non-localized, machine-readable code that describes the error or warning. :paramtype code: str @@ -13996,7 +14916,7 @@ class MongoDbFinishCommand(CommandProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database", "Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart". @@ -14023,7 +14943,7 @@ class MongoDbFinishCommand(CommandProperties): "input": {"key": "input", "type": "MongoDbFinishCommandInput"}, } - def __init__(self, *, input: Optional["_models.MongoDbFinishCommandInput"] = None, **kwargs): + def __init__(self, *, input: Optional["_models.MongoDbFinishCommandInput"] = None, **kwargs: Any) -> None: """ :keyword input: Command input. :paramtype input: ~azure.mgmt.datamigration.models.MongoDbFinishCommandInput @@ -14036,7 +14956,7 @@ def __init__(self, *, input: Optional["_models.MongoDbFinishCommandInput"] = Non class MongoDbFinishCommandInput(MongoDbCommandInput): """Describes the input to the 'finish' MongoDB migration command. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. 
:ivar object_name: The qualified name of a database or collection to act upon, or null to act upon the entire migration. @@ -14056,7 +14976,7 @@ class MongoDbFinishCommandInput(MongoDbCommandInput): "immediate": {"key": "immediate", "type": "bool"}, } - def __init__(self, *, immediate: bool, object_name: Optional[str] = None, **kwargs): + def __init__(self, *, immediate: bool, object_name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword object_name: The qualified name of a database or collection to act upon, or null to act upon the entire migration. @@ -14073,7 +14993,7 @@ def __init__(self, *, immediate: bool, object_name: Optional[str] = None, **kwar class MongoDbMigrationProgress(MongoDbProgress): # pylint: disable=too-many-instance-attributes """Describes the progress of the overall migration. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar bytes_copied: The number of document bytes copied during the Copying stage. Required. :vartype bytes_copied: int @@ -14169,8 +15089,8 @@ def __init__( name: Optional[str] = None, qualified_name: Optional[str] = None, databases: Optional[Dict[str, "_models.MongoDbDatabaseProgress"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword bytes_copied: The number of document bytes copied during the Copying stage. Required. :paramtype bytes_copied: int @@ -14237,7 +15157,7 @@ def __init__( class MongoDbMigrationSettings(_serialization.Model): """Describes how a MongoDB data migration should be performed. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. 
:ivar boost_r_us: The RU limit on a CosmosDB target that collections will be temporarily increased to (if lower) during the initial copy of a migration, from 10,000 to 1,000,000, or 0 @@ -14282,8 +15202,8 @@ def __init__( boost_r_us: Optional[int] = None, replication: Optional[Union[str, "_models.MongoDbReplication"]] = None, throttling: Optional["_models.MongoDbThrottlingSettings"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword boost_r_us: The RU limit on a CosmosDB target that collections will be temporarily increased to (if lower) during the initial copy of a migration, from 10,000 to 1,000,000, or 0 @@ -14317,7 +15237,7 @@ class MongoDbRestartCommand(CommandProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar command_type: Command type. Required. Known values are: "Migrate.Sync.Complete.Database", "Migrate.SqlServer.AzureDbSqlMi.Complete", "cancel", "finish", and "restart". @@ -14344,7 +15264,7 @@ class MongoDbRestartCommand(CommandProperties): "input": {"key": "input", "type": "MongoDbCommandInput"}, } - def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **kwargs): + def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **kwargs: Any) -> None: """ :keyword input: Command input. :paramtype input: ~azure.mgmt.datamigration.models.MongoDbCommandInput @@ -14357,7 +15277,7 @@ def __init__(self, *, input: Optional["_models.MongoDbCommandInput"] = None, **k class MongoDbShardKeyField(_serialization.Model): """Describes a field reference within a MongoDB shard key. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar name: The name of the field. Required. 
:vartype name: str @@ -14376,7 +15296,7 @@ class MongoDbShardKeyField(_serialization.Model): "order": {"key": "order", "type": "str"}, } - def __init__(self, *, name: str, order: Union[str, "_models.MongoDbShardKeyOrder"], **kwargs): + def __init__(self, *, name: str, order: Union[str, "_models.MongoDbShardKeyOrder"], **kwargs: Any) -> None: """ :keyword name: The name of the field. Required. :paramtype name: str @@ -14392,7 +15312,7 @@ def __init__(self, *, name: str, order: Union[str, "_models.MongoDbShardKeyOrder class MongoDbShardKeyInfo(_serialization.Model): """Describes a MongoDB shard key. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar fields: The fields within the shard key. Required. :vartype fields: list[~azure.mgmt.datamigration.models.MongoDbShardKeyField] @@ -14410,7 +15330,7 @@ class MongoDbShardKeyInfo(_serialization.Model): "is_unique": {"key": "isUnique", "type": "bool"}, } - def __init__(self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: bool, **kwargs): + def __init__(self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: bool, **kwargs: Any) -> None: """ :keyword fields: The fields within the shard key. Required. :paramtype fields: list[~azure.mgmt.datamigration.models.MongoDbShardKeyField] @@ -14425,7 +15345,7 @@ def __init__(self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: b class MongoDbShardKeySetting(_serialization.Model): """Describes a MongoDB shard key. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar fields: The fields within the shard key. Required. 
:vartype fields: list[~azure.mgmt.datamigration.models.MongoDbShardKeyField] @@ -14442,7 +15362,9 @@ class MongoDbShardKeySetting(_serialization.Model): "is_unique": {"key": "isUnique", "type": "bool"}, } - def __init__(self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: Optional[bool] = None, **kwargs): + def __init__( + self, *, fields: List["_models.MongoDbShardKeyField"], is_unique: Optional[bool] = None, **kwargs: Any + ) -> None: """ :keyword fields: The fields within the shard key. Required. :paramtype fields: list[~azure.mgmt.datamigration.models.MongoDbShardKeyField] @@ -14480,8 +15402,8 @@ def __init__( min_free_cpu: Optional[int] = None, min_free_memory_mb: Optional[int] = None, max_parallelism: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword min_free_cpu: The percentage of CPU time that the migrator will try to avoid using, from 0 to 100. @@ -14499,10 +15421,111 @@ def __init__( self.max_parallelism = max_parallelism +class MongoMigrationCollection(_serialization.Model): + """Mongo source and target database and collection details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar source_database: Source database name. + :vartype source_database: str + :ivar source_collection: Source collection name. + :vartype source_collection: str + :ivar target_database: Target database name. + :vartype target_database: str + :ivar target_collection: Target collection name. + :vartype target_collection: str + :ivar migration_progress_details: Detailed migration status. Not included by default. 
+ :vartype migration_progress_details: + ~azure.mgmt.datamigration.models.MongoMigrationProgressDetails + """ + + _validation = { + "migration_progress_details": {"readonly": True}, + } + + _attribute_map = { + "source_database": {"key": "sourceDatabase", "type": "str"}, + "source_collection": {"key": "sourceCollection", "type": "str"}, + "target_database": {"key": "targetDatabase", "type": "str"}, + "target_collection": {"key": "targetCollection", "type": "str"}, + "migration_progress_details": {"key": "migrationProgressDetails", "type": "MongoMigrationProgressDetails"}, + } + + def __init__( + self, + *, + source_database: Optional[str] = None, + source_collection: Optional[str] = None, + target_database: Optional[str] = None, + target_collection: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword source_database: Source database name. + :paramtype source_database: str + :keyword source_collection: Source collection name. + :paramtype source_collection: str + :keyword target_database: Target database name. + :paramtype target_database: str + :keyword target_collection: Target collection name. + :paramtype target_collection: str + """ + super().__init__(**kwargs) + self.source_database = source_database + self.source_collection = source_collection + self.target_database = target_database + self.target_collection = target_collection + self.migration_progress_details = None + + +class MongoMigrationProgressDetails(_serialization.Model): + """Detailed status of collection migration. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar migration_status: Migration Status. Known values are: "NotStarted", "InProgress", + "Completed", "Failed", and "Canceled". + :vartype migration_status: str or ~azure.mgmt.datamigration.models.MongoMigrationStatus + :ivar migration_error: Migration Error. + :vartype migration_error: str + :ivar source_document_count: Source Document Count. 
+ :vartype source_document_count: int + :ivar processed_document_count: Processed Document Count. + :vartype processed_document_count: int + :ivar duration_in_seconds: Migration duration. + :vartype duration_in_seconds: int + """ + + _validation = { + "migration_status": {"readonly": True}, + "migration_error": {"readonly": True}, + "source_document_count": {"readonly": True}, + "processed_document_count": {"readonly": True}, + "duration_in_seconds": {"readonly": True}, + } + + _attribute_map = { + "migration_status": {"key": "migrationStatus", "type": "str"}, + "migration_error": {"key": "migrationError", "type": "str"}, + "source_document_count": {"key": "sourceDocumentCount", "type": "int"}, + "processed_document_count": {"key": "processedDocumentCount", "type": "int"}, + "duration_in_seconds": {"key": "durationInSeconds", "type": "int"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.migration_status = None + self.migration_error = None + self.source_document_count = None + self.processed_document_count = None + self.duration_in_seconds = None + + class MySqlConnectionInfo(ConnectionInfo): """Information for connecting to MySQL server. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Type of connection info. Required. :vartype type: str @@ -14555,8 +15578,8 @@ def __init__( encrypt_connection: bool = True, authentication: Optional[Union[str, "_models.AuthenticationType"]] = None, additional_settings: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword user_name: User name. 
:paramtype user_name: str @@ -14601,7 +15624,7 @@ class NameAvailabilityRequest(_serialization.Model): "type": {"key": "type", "type": "str"}, } - def __init__(self, *, name: Optional[str] = None, type: Optional[str] = None, **kwargs): + def __init__(self, *, name: Optional[str] = None, type: Optional[str] = None, **kwargs: Any) -> None: """ :keyword name: The proposed resource name. :paramtype name: str @@ -14638,8 +15661,8 @@ def __init__( name_available: Optional[bool] = None, reason: Optional[Union[str, "_models.NameCheckFailureReason"]] = None, message: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name_available: If true, the name is valid and available. If false, 'reason' describes why not. @@ -14708,7 +15731,7 @@ class NodeMonitoringData(_serialization.Model): "received_bytes": {"key": "receivedBytes", "type": "float"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.additional_properties = None @@ -14733,7 +15756,7 @@ class NonSqlDataMigrationTable(_serialization.Model): "source_name": {"key": "sourceName", "type": "str"}, } - def __init__(self, *, source_name: Optional[str] = None, **kwargs): + def __init__(self, *, source_name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword source_name: Source table name. :paramtype source_name: str @@ -14785,7 +15808,7 @@ class NonSqlDataMigrationTableResult(_serialization.Model): "errors": {"key": "errors", "type": "[DataMigrationError]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.result_code = None @@ -14800,7 +15823,7 @@ def __init__(self, **kwargs): class NonSqlMigrationTaskInput(_serialization.Model): """Base class for non sql migration task input. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. 
:ivar target_connection_info: Information for connecting to target. Required. :vartype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -14839,8 +15862,8 @@ def __init__( project_name: str, project_location: str, selected_tables: List["_models.NonSqlDataMigrationTable"], - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword target_connection_info: Information for connecting to target. Required. :paramtype target_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -14910,7 +15933,7 @@ class NonSqlMigrationTaskOutput(_serialization.Model): "target_server_name": {"key": "targetServerName", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -14947,8 +15970,8 @@ def __init__( code: Optional[str] = None, message: Optional[str] = None, details: Optional[List["_models.ODataError"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword code: The machine-readable description of the error, such as 'InvalidRequest' or 'InternalServerError'. @@ -14980,7 +16003,9 @@ class OfflineConfiguration(_serialization.Model): "last_backup_name": {"key": "lastBackupName", "type": "str"}, } - def __init__(self, *, offline: Optional[bool] = None, last_backup_name: Optional[str] = None, **kwargs): + def __init__( + self, *, offline: Optional[bool] = None, last_backup_name: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword offline: Offline migration. 
:paramtype offline: bool @@ -15015,7 +16040,7 @@ class OperationListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None @@ -15054,7 +16079,7 @@ class OperationsDefinition(_serialization.Model): "properties": {"key": "properties", "type": "{object}"}, } - def __init__(self, *, is_data_action: Optional[bool] = None, **kwargs): + def __init__(self, *, is_data_action: Optional[bool] = None, **kwargs: Any) -> None: """ :keyword is_data_action: Indicates whether the operation is a data action. :paramtype is_data_action: bool @@ -15096,7 +16121,7 @@ class OperationsDisplayDefinition(_serialization.Model): "description": {"key": "description", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.provider = None @@ -15108,7 +16133,7 @@ def __init__(self, **kwargs): class OracleConnectionInfo(ConnectionInfo): """Information for connecting to Oracle server. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Type of connection info. Required. :vartype type: str @@ -15156,8 +16181,8 @@ def __init__( server_version: Optional[str] = None, port: Optional[int] = None, authentication: Optional[Union[str, "_models.AuthenticationType"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword user_name: User name. 
:paramtype user_name: str @@ -15223,7 +16248,7 @@ class OracleOCIDriverInfo(_serialization.Model): "supported_oracle_versions": {"key": "supportedOracleVersions", "type": "[str]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.driver_name = None @@ -15248,7 +16273,7 @@ class OrphanedUserInfo(_serialization.Model): "database_name": {"key": "databaseName", "type": "str"}, } - def __init__(self, *, name: Optional[str] = None, database_name: Optional[str] = None, **kwargs): + def __init__(self, *, name: Optional[str] = None, database_name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword name: Name of the orphaned user. :paramtype name: str @@ -15263,7 +16288,7 @@ def __init__(self, *, name: Optional[str] = None, database_name: Optional[str] = class PostgreSqlConnectionInfo(ConnectionInfo): # pylint: disable=too-many-instance-attributes """Information for connecting to PostgreSQL server. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Type of connection info. Required. :vartype type: str @@ -15332,8 +16357,8 @@ def __init__( additional_settings: Optional[str] = None, server_brand_version: Optional[str] = None, authentication: Optional[Union[str, "_models.AuthenticationType"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword user_name: User name. :paramtype user_name: str @@ -15376,7 +16401,7 @@ def __init__( self.authentication = authentication -class Project(TrackedResource): # pylint: disable=too-many-instance-attributes +class Project(TrackedResourceAutoGenerated): # pylint: disable=too-many-instance-attributes """A project resource. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -15392,7 +16417,7 @@ class Project(TrackedResource): # pylint: disable=too-many-instance-attributes :ivar type: :vartype type: str :ivar system_data: - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated :ivar etag: HTTP strong entity tag value. This is ignored if submitted. :vartype etag: str :ivar source_platform: Source platform for the project. Known values are: "SQL", "MySQL", @@ -15432,7 +16457,7 @@ class Project(TrackedResource): # pylint: disable=too-many-instance-attributes "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, "etag": {"key": "etag", "type": "str"}, "source_platform": {"key": "properties.sourcePlatform", "type": "str"}, "azure_authentication_info": {"key": "properties.azureAuthenticationInfo", "type": "AzureActiveDirectoryApp"}, @@ -15456,8 +16481,8 @@ def __init__( source_connection_info: Optional["_models.ConnectionInfo"] = None, target_connection_info: Optional["_models.ConnectionInfo"] = None, databases_info: Optional[List["_models.DatabaseInfo"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword location: :paramtype location: str @@ -15493,7 +16518,7 @@ def __init__( self.provisioning_state = None -class Resource(_serialization.Model): +class ResourceAutoGenerated(_serialization.Model): """ARM resource. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -15518,7 +16543,7 @@ class Resource(_serialization.Model): "type": {"key": "type", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -15526,7 +16551,7 @@ def __init__(self, **kwargs): self.type = None -class ProjectFile(Resource): +class ProjectFile(ResourceAutoGenerated): """A file resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -15542,7 +16567,7 @@ class ProjectFile(Resource): :ivar properties: Custom file properties. :vartype properties: ~azure.mgmt.datamigration.models.ProjectFileProperties :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated """ _validation = { @@ -15558,12 +16583,12 @@ class ProjectFile(Resource): "type": {"key": "type", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "properties": {"key": "properties", "type": "ProjectFileProperties"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, } def __init__( - self, *, etag: Optional[str] = None, properties: Optional["_models.ProjectFileProperties"] = None, **kwargs - ): + self, *, etag: Optional[str] = None, properties: Optional["_models.ProjectFileProperties"] = None, **kwargs: Any + ) -> None: """ :keyword etag: HTTP strong entity tag value. This is ignored if submitted. :paramtype etag: str @@ -15615,8 +16640,8 @@ def __init__( extension: Optional[str] = None, file_path: Optional[str] = None, media_type: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword extension: Optional File extension. If submitted it should not have a leading period and must match the extension from filePath. 
@@ -15650,7 +16675,9 @@ class ProjectList(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, *, value: Optional[List["_models.Project"]] = None, next_link: Optional[str] = None, **kwargs): + def __init__( + self, *, value: Optional[List["_models.Project"]] = None, next_link: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword value: List of projects. :paramtype value: list[~azure.mgmt.datamigration.models.Project] @@ -15662,7 +16689,7 @@ def __init__(self, *, value: Optional[List["_models.Project"]] = None, next_link self.next_link = next_link -class ProjectTask(Resource): +class ProjectTask(ResourceAutoGenerated): """A task resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -15678,7 +16705,7 @@ class ProjectTask(Resource): :ivar properties: Custom task properties. :vartype properties: ~azure.mgmt.datamigration.models.ProjectTaskProperties :ivar system_data: Metadata pertaining to creation and last modification of the resource. - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated """ _validation = { @@ -15694,12 +16721,12 @@ class ProjectTask(Resource): "type": {"key": "type", "type": "str"}, "etag": {"key": "etag", "type": "str"}, "properties": {"key": "properties", "type": "ProjectTaskProperties"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, } def __init__( - self, *, etag: Optional[str] = None, properties: Optional["_models.ProjectTaskProperties"] = None, **kwargs - ): + self, *, etag: Optional[str] = None, properties: Optional["_models.ProjectTaskProperties"] = None, **kwargs: Any + ) -> None: """ :keyword etag: HTTP strong entity tag value. This is ignored if submitted. 
:paramtype etag: str @@ -15731,8 +16758,8 @@ def __init__( *, query_results: Optional["_models.QueryExecutionResult"] = None, validation_errors: Optional["_models.ValidationError"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword query_results: List of queries executed and it's execution results in source and target. @@ -15772,8 +16799,8 @@ def __init__( statements_in_batch: Optional[int] = None, source_result: Optional["_models.ExecutionStatistics"] = None, target_result: Optional["_models.ExecutionStatistics"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword query_text: Query text retrieved from the source server. :paramtype query_text: str @@ -15824,8 +16851,8 @@ def __init__( limit: Optional[float] = None, name: Optional["_models.QuotaName"] = None, unit: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword current_value: The current value of the quota. If null or missing, the current value cannot be determined in the context of the request. @@ -15863,7 +16890,9 @@ class QuotaList(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, *, value: Optional[List["_models.Quota"]] = None, next_link: Optional[str] = None, **kwargs): + def __init__( + self, *, value: Optional[List["_models.Quota"]] = None, next_link: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword value: List of quotas. :paramtype value: list[~azure.mgmt.datamigration.models.Quota] @@ -15890,7 +16919,7 @@ class QuotaName(_serialization.Model): "value": {"key": "value", "type": "str"}, } - def __init__(self, *, localized_value: Optional[str] = None, value: Optional[str] = None, **kwargs): + def __init__(self, *, localized_value: Optional[str] = None, value: Optional[str] = None, **kwargs: Any) -> None: """ :keyword localized_value: The localized name of the quota. 
:paramtype localized_value: str @@ -15925,8 +16954,8 @@ def __init__( key_name: Optional[str] = None, auth_key1: Optional[str] = None, auth_key2: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword key_name: The name of authentication key to generate. :paramtype key_name: str @@ -15976,8 +17005,8 @@ def __init__( line_number: Optional[str] = None, h_result: Optional[int] = None, stack_trace: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword message: Error message. :paramtype message: str @@ -16002,7 +17031,7 @@ def __init__( class ResourceSku(_serialization.Model): # pylint: disable=too-many-instance-attributes - """Describes an available DMS SKU. + """Describes an available DMS (classic) SKU. Variables are only populated by the server, and will be ignored when sending a request. @@ -16010,7 +17039,7 @@ class ResourceSku(_serialization.Model): # pylint: disable=too-many-instance-at :vartype resource_type: str :ivar name: The name of SKU. :vartype name: str - :ivar tier: Specifies the tier of DMS in a scale set. + :ivar tier: Specifies the tier of DMS (classic) in a scale set. :vartype tier: str :ivar size: The Size of the SKU. 
:vartype size: str @@ -16063,7 +17092,7 @@ class ResourceSku(_serialization.Model): # pylint: disable=too-many-instance-at "restrictions": {"key": "restrictions", "type": "[ResourceSkuRestrictions]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.resource_type = None @@ -16101,7 +17130,7 @@ class ResourceSkuCapabilities(_serialization.Model): "value": {"key": "value", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.name = None @@ -16138,7 +17167,7 @@ class ResourceSkuCapacity(_serialization.Model): "scale_type": {"key": "scaleType", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.minimum = None @@ -16172,7 +17201,7 @@ class ResourceSkuCosts(_serialization.Model): "extended_unit": {"key": "extendedUnit", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.meter_id = None @@ -16207,7 +17236,7 @@ class ResourceSkuRestrictions(_serialization.Model): "reason_code": {"key": "reasonCode", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.type = None @@ -16216,14 +17245,14 @@ def __init__(self, **kwargs): class ResourceSkusResult(_serialization.Model): - """The DMS List SKUs operation response. + """The DMS (classic) List SKUs operation response. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar value: The list of SKUs available for the subscription. Required. :vartype value: list[~azure.mgmt.datamigration.models.ResourceSku] - :ivar next_link: The uri to fetch the next page of DMS SKUs. Call ListNext() with this to fetch - the next page of DMS SKUs. 
+ :ivar next_link: The uri to fetch the next page of DMS (classic) SKUs. Call ListNext() with + this to fetch the next page of DMS (classic) SKUs. :vartype next_link: str """ @@ -16236,12 +17265,12 @@ class ResourceSkusResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, *, value: List["_models.ResourceSku"], next_link: Optional[str] = None, **kwargs): + def __init__(self, *, value: List["_models.ResourceSku"], next_link: Optional[str] = None, **kwargs: Any) -> None: """ :keyword value: The list of SKUs available for the subscription. Required. :paramtype value: list[~azure.mgmt.datamigration.models.ResourceSku] - :keyword next_link: The uri to fetch the next page of DMS SKUs. Call ListNext() with this to - fetch the next page of DMS SKUs. + :keyword next_link: The uri to fetch the next page of DMS (classic) SKUs. Call ListNext() with + this to fetch the next page of DMS (classic) SKUs. :paramtype next_link: str """ super().__init__(**kwargs) @@ -16278,8 +17307,8 @@ def __init__( validation_errors: Optional["_models.ValidationError"] = None, source_database_object_count: Optional[Dict[str, int]] = None, target_database_object_count: Optional[Dict[str, int]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword schema_differences: List of schema differences between the source and target databases. @@ -16326,8 +17355,8 @@ def __init__( object_name: Optional[str] = None, object_type: Optional[Union[str, "_models.ObjectType"]] = None, update_action: Optional[Union[str, "_models.UpdateActionType"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword object_name: Name of the object that has the difference. 
:paramtype object_name: str @@ -16369,8 +17398,8 @@ def __init__( schema_option: Optional[Union[str, "_models.SchemaMigrationOption"]] = None, file_id: Optional[str] = None, file_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword schema_option: Option on how to migrate the schema. Known values are: "None", "ExtractFromSource", and "UseStorageFile". @@ -16389,7 +17418,7 @@ def __init__( class SelectedCertificateInput(_serialization.Model): """Info for certificate to be exported for TDE enabled databases. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar certificate_name: Name of certificate to be exported. Required. :vartype certificate_name: str @@ -16407,7 +17436,7 @@ class SelectedCertificateInput(_serialization.Model): "password": {"key": "password", "type": "str"}, } - def __init__(self, *, certificate_name: str, password: str, **kwargs): + def __init__(self, *, certificate_name: str, password: str, **kwargs: Any) -> None: """ :keyword certificate_name: Name of certificate to be exported. Required. :paramtype certificate_name: str @@ -16456,7 +17485,7 @@ class ServerProperties(_serialization.Model): "server_database_count": {"key": "serverDatabaseCount", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.server_platform = None @@ -16468,7 +17497,7 @@ def __init__(self, **kwargs): class ServiceOperation(_serialization.Model): - """Description of an action supported by the Database Migration Service. + """Description of an action supported by the Azure Database Migration Service (classic). :ivar name: The fully qualified action name, e.g. Microsoft.DataMigration/services/read. 
:vartype name: str @@ -16482,8 +17511,8 @@ class ServiceOperation(_serialization.Model): } def __init__( - self, *, name: Optional[str] = None, display: Optional["_models.ServiceOperationDisplay"] = None, **kwargs - ): + self, *, name: Optional[str] = None, display: Optional["_models.ServiceOperationDisplay"] = None, **kwargs: Any + ) -> None: """ :keyword name: The fully qualified action name, e.g. Microsoft.DataMigration/services/read. :paramtype name: str @@ -16522,8 +17551,8 @@ def __init__( resource: Optional[str] = None, operation: Optional[str] = None, description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword provider: The localized resource provider name. :paramtype provider: str @@ -16556,8 +17585,12 @@ class ServiceOperationList(_serialization.Model): } def __init__( - self, *, value: Optional[List["_models.ServiceOperation"]] = None, next_link: Optional[str] = None, **kwargs - ): + self, + *, + value: Optional[List["_models.ServiceOperation"]] = None, + next_link: Optional[str] = None, + **kwargs: Any + ) -> None: """ :keyword value: List of actions. :paramtype value: list[~azure.mgmt.datamigration.models.ServiceOperation] @@ -16602,8 +17635,8 @@ def __init__( family: Optional[str] = None, size: Optional[str] = None, capacity: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: The unique name of the SKU, such as 'P3'. :paramtype name: str @@ -16641,8 +17674,12 @@ class ServiceSkuList(_serialization.Model): } def __init__( - self, *, value: Optional[List["_models.AvailableServiceSku"]] = None, next_link: Optional[str] = None, **kwargs - ): + self, + *, + value: Optional[List["_models.AvailableServiceSku"]] = None, + next_link: Optional[str] = None, + **kwargs: Any + ) -> None: """ :keyword value: List of service SKUs. 
:paramtype value: list[~azure.mgmt.datamigration.models.AvailableServiceSku] @@ -16682,8 +17719,8 @@ def __init__( *, file_share: Optional["_models.SqlFileShare"] = None, azure_blob: Optional["_models.AzureBlob"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword file_share: Source File share. :paramtype file_share: ~azure.mgmt.datamigration.models.SqlFileShare @@ -16742,7 +17779,7 @@ class SqlBackupFileInfo(_serialization.Model): "family_sequence_number": {"key": "familySequenceNumber", "type": "int"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.file_name = None @@ -16812,7 +17849,7 @@ class SqlBackupSetInfo(_serialization.Model): # pylint: disable=too-many-instan "ignore_reasons": {"key": "ignoreReasons", "type": "[str]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.backup_set_id = None @@ -16831,7 +17868,7 @@ def __init__(self, **kwargs): class SqlConnectionInfo(ConnectionInfo): # pylint: disable=too-many-instance-attributes """Information for connecting to SQL database server. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Type of connection info. Required. :vartype type: str @@ -16840,7 +17877,7 @@ class SqlConnectionInfo(ConnectionInfo): # pylint: disable=too-many-instance-at :ivar password: Password credential. :vartype password: str :ivar data_source: Data source in the format - Protocol:MachineName\SQLServerInstanceName,PortNumber. Required. + Protocol:MachineName\\SQLServerInstanceName,PortNumber. Required. :vartype data_source: str :ivar server_name: name of the server. 
:vartype server_name: str @@ -16905,15 +17942,15 @@ def __init__( additional_settings: Optional[str] = None, trust_server_certificate: bool = False, platform: Optional[Union[str, "_models.SqlSourcePlatform"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword user_name: User name. :paramtype user_name: str :keyword password: Password credential. :paramtype password: str :keyword data_source: Data source in the format - Protocol:MachineName\SQLServerInstanceName,PortNumber. Required. + Protocol:MachineName\\SQLServerInstanceName,PortNumber. Required. :paramtype data_source: str :keyword server_name: name of the server. :paramtype server_name: str @@ -16989,8 +18026,8 @@ def __init__( password: Optional[str] = None, encrypt_connection: Optional[bool] = None, trust_server_certificate: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword data_source: Data source. :paramtype data_source: str @@ -17040,7 +18077,7 @@ class SqlDbMigrationStatusDetails(_serialization.Model): "list_of_copy_progress_details": {"key": "listOfCopyProgressDetails", "type": "[CopyProgressDetails]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.migration_state = None @@ -17065,7 +18102,7 @@ class SqlDbOfflineConfiguration(_serialization.Model): "offline": {"key": "offline", "type": "bool"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.offline = None @@ -17089,8 +18126,13 @@ class SqlFileShare(_serialization.Model): } def __init__( - self, *, path: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, **kwargs - ): + self, + *, + path: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + **kwargs: Any + ) -> None: """ :keyword path: Location as SMB share or local drive where backups are placed. 
:paramtype path: str @@ -17126,14 +18168,14 @@ class SqlMigrationListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None self.next_link = None -class SqlMigrationService(TrackedResource): +class SqlMigrationService(TrackedResourceAutoGenerated): """A SQL Migration Service. Variables are only populated by the server, and will be ignored when sending a request. @@ -17149,7 +18191,7 @@ class SqlMigrationService(TrackedResource): :ivar type: :vartype type: str :ivar system_data: - :vartype system_data: ~azure.mgmt.datamigration.models.SystemData + :vartype system_data: ~azure.mgmt.datamigration.models.SystemDataAutoGenerated :ivar provisioning_state: Provisioning state to track the async operation status. :vartype provisioning_state: str :ivar integration_runtime_state: Current state of the Integration runtime. @@ -17171,12 +18213,12 @@ class SqlMigrationService(TrackedResource): "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "system_data": {"key": "systemData", "type": "SystemData"}, + "system_data": {"key": "systemData", "type": "SystemDataAutoGenerated"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "integration_runtime_state": {"key": "properties.integrationRuntimeState", "type": "str"}, } - def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword location: :paramtype location: str @@ -17199,7 +18241,7 @@ class SqlMigrationServiceUpdate(_serialization.Model): "tags": {"key": "tags", "type": "{str}"}, } - def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs): + def __init__(self, *, tags: Optional[Dict[str, str]] = 
None, **kwargs: Any) -> None: """ :keyword tags: Dictionary of :code:``. :paramtype tags: dict[str, str] @@ -17211,8 +18253,8 @@ def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs): class SsisMigrationInfo(_serialization.Model): """SSIS migration info with SSIS store type, overwrite policy. - :ivar ssis_store_type: The SSIS store type of source, only SSIS catalog is supported now in - DMS. "SsisCatalog" + :ivar ssis_store_type: The SSIS store type of source, only SSIS catalog is supported now in DMS + (classic). "SsisCatalog" :vartype ssis_store_type: str or ~azure.mgmt.datamigration.models.SsisStoreType :ivar project_overwrite_option: The overwrite option for the SSIS project migration. Known values are: "Ignore" and "Overwrite". @@ -17236,11 +18278,11 @@ def __init__( ssis_store_type: Optional[Union[str, "_models.SsisStoreType"]] = None, project_overwrite_option: Optional[Union[str, "_models.SsisMigrationOverwriteOption"]] = None, environment_overwrite_option: Optional[Union[str, "_models.SsisMigrationOverwriteOption"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword ssis_store_type: The SSIS store type of source, only SSIS catalog is supported now in - DMS. "SsisCatalog" + DMS (classic). "SsisCatalog" :paramtype ssis_store_type: str or ~azure.mgmt.datamigration.models.SsisStoreType :keyword project_overwrite_option: The overwrite option for the SSIS project migration. Known values are: "Ignore" and "Overwrite". 
@@ -17283,7 +18325,7 @@ class StartMigrationScenarioServerRoleResult(_serialization.Model): "exceptions_and_warnings": {"key": "exceptionsAndWarnings", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.name = None @@ -17316,7 +18358,7 @@ class SyncMigrationDatabaseErrorEvent(_serialization.Model): "event_text": {"key": "eventText", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.timestamp_string = None @@ -17325,18 +18367,83 @@ def __init__(self, **kwargs): class SystemData(_serialization.Model): - """SystemData. + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
+ :vartype last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + "created_by": {"key": "createdBy", "type": "str"}, + "created_by_type": {"key": "createdByType", "type": "str"}, + "created_at": {"key": "createdAt", "type": "iso-8601"}, + "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, + "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, + "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """ + :keyword created_by: The identity that created the resource. + :paramtype created_by: str + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :paramtype created_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType + :keyword created_at: The timestamp of resource creation (UTC). + :paramtype created_at: ~datetime.datetime + :keyword last_modified_by: The identity that last modified the resource. + :paramtype last_modified_by: str + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", and "Key". + :paramtype last_modified_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType + :keyword last_modified_at: The timestamp of resource last modification (UTC). 
+ :paramtype last_modified_at: ~datetime.datetime + """ + super().__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + +class SystemDataAutoGenerated(_serialization.Model): + """SystemDataAutoGenerated. :ivar created_by: :vartype created_by: str - :ivar created_by_type: Known values are: "User", "Application", "ManagedIdentity", and "Key". + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". :vartype created_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType :ivar created_at: :vartype created_at: ~datetime.datetime :ivar last_modified_by: :vartype last_modified_by: str - :ivar last_modified_by_type: Known values are: "User", "Application", "ManagedIdentity", and - "Key". + :ivar last_modified_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". :vartype last_modified_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType :ivar last_modified_at: :vartype last_modified_at: ~datetime.datetime @@ -17360,20 +18467,20 @@ def __init__( last_modified_by: Optional[str] = None, last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, last_modified_at: Optional[datetime.datetime] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword created_by: :paramtype created_by: str - :keyword created_by_type: Known values are: "User", "Application", "ManagedIdentity", and - "Key". + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". 
:paramtype created_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType :keyword created_at: :paramtype created_at: ~datetime.datetime :keyword last_modified_by: :paramtype last_modified_by: str - :keyword last_modified_by_type: Known values are: "User", "Application", "ManagedIdentity", and - "Key". + :keyword last_modified_by_type: The type of identity that created the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". :paramtype last_modified_by_type: str or ~azure.mgmt.datamigration.models.CreatedByType :keyword last_modified_at: :paramtype last_modified_at: ~datetime.datetime @@ -17402,8 +18509,8 @@ class TargetLocation(_serialization.Model): } def __init__( - self, *, storage_account_resource_id: Optional[str] = None, account_key: Optional[str] = None, **kwargs - ): + self, *, storage_account_resource_id: Optional[str] = None, account_key: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword storage_account_resource_id: Resource Id of the storage account copying backups. :paramtype storage_account_resource_id: str @@ -17430,8 +18537,8 @@ class TaskList(_serialization.Model): } def __init__( - self, *, value: Optional[List["_models.ProjectTask"]] = None, next_link: Optional[str] = None, **kwargs - ): + self, *, value: Optional[List["_models.ProjectTask"]] = None, next_link: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword value: List of tasks. :paramtype value: list[~azure.mgmt.datamigration.models.ProjectTask] @@ -17454,7 +18561,7 @@ class UploadOCIDriverTaskInput(_serialization.Model): "driver_share": {"key": "driverShare", "type": "FileShare"}, } - def __init__(self, *, driver_share: Optional["_models.FileShare"] = None, **kwargs): + def __init__(self, *, driver_share: Optional["_models.FileShare"] = None, **kwargs: Any) -> None: """ :keyword driver_share: File share information for the OCI driver archive. 
:paramtype driver_share: ~azure.mgmt.datamigration.models.FileShare @@ -17484,7 +18591,7 @@ class UploadOCIDriverTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.driver_package_name = None @@ -17496,7 +18603,7 @@ class UploadOCIDriverTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -17555,8 +18662,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.UploadOCIDriverTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -17569,12 +18676,12 @@ def __init__( self.output = None -class ValidateMigrationInputSqlServerSqlDbSyncTaskProperties(ProjectTaskProperties): +class ValidateMigrationInputSqlServerSqlDbSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long """Properties for task that validates migration input for SQL to Azure SQL DB sync migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -17634,8 +18741,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ValidateSyncMigrationInputSqlServerTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -17648,10 +18755,11 @@ def __init__( self.output = None -class ValidateMigrationInputSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskInput): - """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario. +class ValidateMigrationInputSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskInput): # pylint: disable=name-too-long + """Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online + scenario. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar selected_databases: Databases to migrate. Required. :vartype selected_databases: @@ -17665,70 +18773,16 @@ class ValidateMigrationInputSqlServerSqlMISyncTaskInput(SqlServerSqlMISyncTaskIn :ivar target_connection_info: Connection information for Azure SQL Database Managed Instance. Required. :vartype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo - :ivar azure_app: Azure Active Directory Application the DMS instance will use to connect to the - target instance of Azure SQL Database Managed Instance and the Azure Storage Account. Required. + :ivar azure_app: Azure Active Directory Application the DMS (classic) instance will use to + connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage + Account. Required. 
:vartype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp """ - _validation = { - "selected_databases": {"required": True}, - "storage_resource_id": {"required": True}, - "source_connection_info": {"required": True}, - "target_connection_info": {"required": True}, - "azure_app": {"required": True}, - } - - _attribute_map = { - "selected_databases": {"key": "selectedDatabases", "type": "[MigrateSqlServerSqlMIDatabaseInput]"}, - "backup_file_share": {"key": "backupFileShare", "type": "FileShare"}, - "storage_resource_id": {"key": "storageResourceId", "type": "str"}, - "source_connection_info": {"key": "sourceConnectionInfo", "type": "SqlConnectionInfo"}, - "target_connection_info": {"key": "targetConnectionInfo", "type": "MiSqlConnectionInfo"}, - "azure_app": {"key": "azureApp", "type": "AzureActiveDirectoryApp"}, - } - - def __init__( - self, - *, - selected_databases: List["_models.MigrateSqlServerSqlMIDatabaseInput"], - storage_resource_id: str, - source_connection_info: "_models.SqlConnectionInfo", - target_connection_info: "_models.MiSqlConnectionInfo", - azure_app: "_models.AzureActiveDirectoryApp", - backup_file_share: Optional["_models.FileShare"] = None, - **kwargs - ): - """ - :keyword selected_databases: Databases to migrate. Required. - :paramtype selected_databases: - list[~azure.mgmt.datamigration.models.MigrateSqlServerSqlMIDatabaseInput] - :keyword backup_file_share: Backup file share information for all selected databases. - :paramtype backup_file_share: ~azure.mgmt.datamigration.models.FileShare - :keyword storage_resource_id: Fully qualified resourceId of storage. Required. - :paramtype storage_resource_id: str - :keyword source_connection_info: Connection information for source SQL Server. Required. - :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo - :keyword target_connection_info: Connection information for Azure SQL Database Managed - Instance. Required. 
- :paramtype target_connection_info: ~azure.mgmt.datamigration.models.MiSqlConnectionInfo - :keyword azure_app: Azure Active Directory Application the DMS instance will use to connect to - the target instance of Azure SQL Database Managed Instance and the Azure Storage Account. - Required. - :paramtype azure_app: ~azure.mgmt.datamigration.models.AzureActiveDirectoryApp - """ - super().__init__( - selected_databases=selected_databases, - backup_file_share=backup_file_share, - storage_resource_id=storage_resource_id, - source_connection_info=source_connection_info, - target_connection_info=target_connection_info, - azure_app=azure_app, - **kwargs - ) - -class ValidateMigrationInputSqlServerSqlMISyncTaskOutput(_serialization.Model): - """Output for task that validates migration input for Azure SQL Database Managed Instance online migration. +class ValidateMigrationInputSqlServerSqlMISyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for task that validates migration input for Azure SQL Database Managed Instance online + migration. Variables are only populated by the server, and will be ignored when sending a request. @@ -17752,7 +18806,7 @@ class ValidateMigrationInputSqlServerSqlMISyncTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -17760,12 +18814,13 @@ def __init__(self, **kwargs): self.validation_errors = None -class ValidateMigrationInputSqlServerSqlMISyncTaskProperties(ProjectTaskProperties): - """Properties for task that validates migration input for SQL to Azure SQL Database Managed Instance sync scenario. 
+class ValidateMigrationInputSqlServerSqlMISyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for task that validates migration input for SQL to Azure SQL Database Managed + Instance sync scenario. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -17826,8 +18881,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ValidateMigrationInputSqlServerSqlMISyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -17841,10 +18896,10 @@ def __init__( self.output = None -class ValidateMigrationInputSqlServerSqlMITaskInput(_serialization.Model): +class ValidateMigrationInputSqlServerSqlMITaskInput(_serialization.Model): # pylint: disable=name-too-long """Input for task that validates migration input for SQL to Azure SQL Managed Instance. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to source. Required. :vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -17892,8 +18947,8 @@ def __init__( selected_logins: Optional[List[str]] = None, backup_file_share: Optional["_models.FileShare"] = None, backup_mode: Optional[Union[str, "_models.BackupMode"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to source. Required. 
:paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -17923,8 +18978,9 @@ def __init__( self.backup_mode = backup_mode -class ValidateMigrationInputSqlServerSqlMITaskOutput(_serialization.Model): - """Output for task that validates migration input for SQL to Azure SQL Managed Instance migrations. +class ValidateMigrationInputSqlServerSqlMITaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for task that validates migration input for SQL to Azure SQL Managed Instance + migrations. Variables are only populated by the server, and will be ignored when sending a request. @@ -17971,7 +19027,7 @@ class ValidateMigrationInputSqlServerSqlMITaskOutput(_serialization.Model): "database_backup_info": {"key": "databaseBackupInfo", "type": "DatabaseBackupInfo"}, } - def __init__(self, *, database_backup_info: Optional["_models.DatabaseBackupInfo"] = None, **kwargs): + def __init__(self, *, database_backup_info: Optional["_models.DatabaseBackupInfo"] = None, **kwargs: Any) -> None: """ :keyword database_backup_info: Information about backup files when existing backup mode is used. @@ -17988,12 +19044,13 @@ def __init__(self, *, database_backup_info: Optional["_models.DatabaseBackupInfo self.database_backup_info = database_backup_info -class ValidateMigrationInputSqlServerSqlMITaskProperties(ProjectTaskProperties): - """Properties for task that validates migration input for SQL to Azure SQL Database Managed Instance. +class ValidateMigrationInputSqlServerSqlMITaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for task that validates migration input for SQL to Azure SQL Database Managed + Instance. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. 
Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -18053,8 +19110,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.ValidateMigrationInputSqlServerSqlMITaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -18073,7 +19130,7 @@ class ValidateMongoDbTaskProperties(ProjectTaskProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -18132,8 +19189,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MongoDbMigrationSettings"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -18146,12 +19203,13 @@ def __init__( self.output = None -class ValidateOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): - """Properties for the task that validates a migration for Oracle to Azure Database for PostgreSQL for online migrations. +class ValidateOracleAzureDbForPostgreSqlSyncTaskProperties(ProjectTaskProperties): # pylint: disable=name-too-long + """Properties for the task that validates a migration for Oracle to Azure Database for PostgreSQL + for online migrations. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar task_type: Task type. 
Required. Known values are: "Connect.MongoDb", "ConnectToSource.SqlServer", "ConnectToSource.SqlServer.Sync", @@ -18212,8 +19270,8 @@ def __init__( *, client_data: Optional[Dict[str, str]] = None, input: Optional["_models.MigrateOracleAzureDbPostgreSqlSyncTaskInput"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_data: Key value pairs of client data to attach meta data information to task. :paramtype client_data: dict[str, str] @@ -18227,8 +19285,9 @@ def __init__( self.output = None -class ValidateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): - """Output for task that validates migration input for Oracle to Azure Database for PostgreSQL for online migrations. +class ValidateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): # pylint: disable=name-too-long + """Output for task that validates migration input for Oracle to Azure Database for PostgreSQL for + online migrations. Variables are only populated by the server, and will be ignored when sending a request. @@ -18244,16 +19303,16 @@ class ValidateOracleAzureDbPostgreSqlSyncTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.validation_errors = None -class ValidateSyncMigrationInputSqlServerTaskInput(_serialization.Model): +class ValidateSyncMigrationInputSqlServerTaskInput(_serialization.Model): # pylint: disable=name-too-long """Input for task that validates migration input for SQL sync migrations. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar source_connection_info: Information for connecting to source SQL server. Required. 
:vartype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -18282,8 +19341,8 @@ def __init__( source_connection_info: "_models.SqlConnectionInfo", target_connection_info: "_models.SqlConnectionInfo", selected_databases: List["_models.MigrateSqlServerSqlDbSyncDatabaseInput"], - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source_connection_info: Information for connecting to source SQL server. Required. :paramtype source_connection_info: ~azure.mgmt.datamigration.models.SqlConnectionInfo @@ -18299,7 +19358,7 @@ def __init__( self.selected_databases = selected_databases -class ValidateSyncMigrationInputSqlServerTaskOutput(_serialization.Model): +class ValidateSyncMigrationInputSqlServerTaskOutput(_serialization.Model): # pylint: disable=name-too-long """Output for task that validates migration input for SQL sync migrations. Variables are only populated by the server, and will be ignored when sending a request. @@ -18324,7 +19383,7 @@ class ValidateSyncMigrationInputSqlServerTaskOutput(_serialization.Model): "validation_errors": {"key": "validationErrors", "type": "[ReportableException]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -18347,8 +19406,8 @@ class ValidationError(_serialization.Model): } def __init__( - self, *, text: Optional[str] = None, severity: Optional[Union[str, "_models.Severity"]] = None, **kwargs - ): + self, *, text: Optional[str] = None, severity: Optional[Union[str, "_models.Severity"]] = None, **kwargs: Any + ) -> None: """ :keyword text: Error Text. 
:paramtype text: str @@ -18378,8 +19437,13 @@ class WaitStatistics(_serialization.Model): } def __init__( - self, *, wait_type: Optional[str] = None, wait_time_ms: float = 0, wait_count: Optional[int] = None, **kwargs - ): + self, + *, + wait_type: Optional[str] = None, + wait_time_ms: float = 0, + wait_count: Optional[int] = None, + **kwargs: Any + ) -> None: """ :keyword wait_type: Type of the Wait. :paramtype wait_type: str diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/__init__.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/__init__.py index 4825871afb87..c6181dc277d6 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/__init__.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/__init__.py @@ -6,10 +6,17 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from ._database_migrations_mongo_to_cosmos_db_ru_mongo_operations import ( + DatabaseMigrationsMongoToCosmosDbRUMongoOperations, +) +from ._database_migrations_mongo_to_cosmos_dbv_core_mongo_operations import ( + DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations, +) from ._database_migrations_sql_db_operations import DatabaseMigrationsSqlDbOperations from ._database_migrations_sql_mi_operations import DatabaseMigrationsSqlMiOperations from ._database_migrations_sql_vm_operations import DatabaseMigrationsSqlVmOperations from ._operations import Operations +from ._migration_services_operations import MigrationServicesOperations from ._sql_migration_services_operations import SqlMigrationServicesOperations from ._resource_skus_operations import ResourceSkusOperations from ._services_operations import ServicesOperations @@ -24,10 +31,13 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ + 
"DatabaseMigrationsMongoToCosmosDbRUMongoOperations", + "DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations", "DatabaseMigrationsSqlDbOperations", "DatabaseMigrationsSqlMiOperations", "DatabaseMigrationsSqlVmOperations", "Operations", + "MigrationServicesOperations", "SqlMigrationServicesOperations", "ResourceSkusOperations", "ServicesOperations", diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py new file mode 100644 index 000000000000..e0c48395dd51 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py @@ -0,0 +1,694 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, target_resource_name: str, migration_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "targetResourceName": _SERIALIZER.url( + "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_request( + resource_group_name: str, target_resource_name: str, migration_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", 
resource_group_name, "str"), + "targetResourceName": _SERIALIZER.url( + "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + target_resource_name: str, + migration_name: str, + subscription_id: str, + *, + force: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "targetResourceName": _SERIALIZER.url( + "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", 
pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_for_scope_request( + resource_group_name: str, target_resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "targetResourceName": _SERIALIZER.url( + "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + 
+class DatabaseMigrationsMongoToCosmosDbRUMongoOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.datamigration.DataMigrationManagementClient`'s + :attr:`database_migrations_mongo_to_cosmos_db_ru_mongo` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, target_resource_name: str, migration_name: str, **kwargs: Any + ) -> _models.DatabaseMigrationCosmosDbMongo: + """Get Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. 
+ :type migration_name: str + :return: DatabaseMigrationCosmosDbMongo or the result of cls(response) + :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None) + + _request = build_get_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_initial( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: 
Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "DatabaseMigrationCosmosDbMongo") + + _request = build_create_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: _models.DatabaseMigrationCosmosDbMongo, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. 
+ + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Is either a + DatabaseMigrationCosmosDbMongo type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo or IO[bytes] + :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return 
LROPoller[_models.DatabaseMigrationCosmosDbMongo].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.DatabaseMigrationCosmosDbMongo]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + force: Optional[bool] = None, + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_delete_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + force=force, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + force: Optional[bool] = None, + **kwargs: Any + ) -> LROPoller[None]: + """Delete Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param force: Optional force delete boolean. If this is provided as true, migration will be + deleted even if active. Default value is None. 
+ :type force: bool + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + force=force, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def get_for_scope( + self, resource_group_name: str, target_resource_name: str, **kwargs: Any + ) -> Iterable["_models.DatabaseMigrationCosmosDbMongo"]: + """Get Database Migration resources 
for the scope. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :return: An iterator like instance of either DatabaseMigrationCosmosDbMongo or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongoListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_get_for_scope_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongoListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py new file mode 100644 index 000000000000..bb8d5532e4a6 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py @@ -0,0 +1,694 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, target_resource_name: str, migration_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/mongoClusters/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "targetResourceName": _SERIALIZER.url( + "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_create_request( + resource_group_name: str, target_resource_name: str, migration_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/mongoClusters/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "targetResourceName": _SERIALIZER.url( + "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + target_resource_name: str, + migration_name: str, + subscription_id: str, + *, + force: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/mongoClusters/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations/{migrationName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "targetResourceName": _SERIALIZER.url( + "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "migrationName": _SERIALIZER.url("migration_name", migration_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_for_scope_request( + resource_group_name: str, target_resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/mongoClusters/{targetResourceName}/providers/Microsoft.DataMigration/databaseMigrations", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "targetResourceName": _SERIALIZER.url( + "target_resource_name", target_resource_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class DatabaseMigrationsMongoToCosmosDbvCoreMongoOperations: # pylint: disable=name-too-long + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.datamigration.DataMigrationManagementClient`'s + :attr:`database_migrations_mongo_to_cosmos_dbv_core_mongo` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, target_resource_name: str, migration_name: str, **kwargs: Any + ) -> _models.DatabaseMigrationCosmosDbMongo: + """Get Database Migration resource. 
+ + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :return: DatabaseMigrationCosmosDbMongo or the result of cls(response) + :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None) + + _request = build_get_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) + + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_initial( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "DatabaseMigrationCosmosDbMongo") + + _request = build_create_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: _models.DatabaseMigrationCosmosDbMongo, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + parameters: Union[_models.DatabaseMigrationCosmosDbMongo, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.DatabaseMigrationCosmosDbMongo]: + """Create or Update Database Migration resource. 
+ + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param parameters: Details of CosmosDB for Mongo API Migration resource. Is either a + DatabaseMigrationCosmosDbMongo type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo or IO[bytes] + :return: An instance of LROPoller that returns either DatabaseMigrationCosmosDbMongo or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongo] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + 
+ def get_long_running_output(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongo", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.DatabaseMigrationCosmosDbMongo].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.DatabaseMigrationCosmosDbMongo]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + force: Optional[bool] = None, + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_delete_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + subscription_id=self._config.subscription_id, + force=force, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, + resource_group_name: str, + target_resource_name: str, + migration_name: str, + force: Optional[bool] = None, + **kwargs: Any + ) -> LROPoller[None]: + """Delete Database Migration resource. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :param migration_name: Name of the migration. Required. + :type migration_name: str + :param force: Optional force delete boolean. If this is provided as true, migration will be + deleted even if active. Default value is None. 
+ :type force: bool + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + migration_name=migration_name, + force=force, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def get_for_scope( + self, resource_group_name: str, target_resource_name: str, **kwargs: Any + ) -> Iterable["_models.DatabaseMigrationCosmosDbMongo"]: + """Get Database Migration resources 
for the scope. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param target_resource_name: The name of the target resource/account. Required. + :type target_resource_name: str + :return: An iterator like instance of either DatabaseMigrationCosmosDbMongo or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationCosmosDbMongo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationCosmosDbMongoListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_get_for_scope_request( + resource_group_name=resource_group_name, + target_resource_name=target_resource_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationCosmosDbMongoListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_db_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_db_operations.py index 29e046dbd4fe..c3ee7364ed71 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_db_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_db_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,12 +16,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -28,12 +30,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -54,9 +55,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -71,7 +70,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters if migration_operation_id is not None: @@ -92,9 +91,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -110,7 +107,7 @@ def 
build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -134,9 +131,7 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) # Construct URL _url = kwargs.pop( "template_url", @@ -149,7 +144,7 @@ def build_delete_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters if force is not None: @@ -165,9 +160,7 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # Construct URL _url = kwargs.pop( @@ -181,7 +174,7 @@ def build_cancel_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -237,12 
+230,11 @@ def get( :type migration_operation_id: str :param expand: Complete migration details be included in the response. Default value is None. :type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DatabaseMigrationSqlDb or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -253,12 +245,10 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -266,15 +256,14 @@ def get( migration_operation_id=migration_operation_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -283,26 +272,22 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - 
deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return deserialized # type: ignore def _create_or_update_initial( self, resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlDb, IO], + parameters: Union[_models.DatabaseMigrationSqlDb, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseMigrationSqlDb: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -313,21 +298,19 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DatabaseMigrationSqlDb") - request = build_create_or_update_request( + 
_request = build_create_or_update_request( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -336,38 +319,34 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } - @overload def begin_create_or_update( self, @@ -393,14 +372,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either DatabaseMigrationSqlDb or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb] @@ -413,7 +384,7 @@ def begin_create_or_update( resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -428,18 +399,10 @@ def begin_create_or_update( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Details of Sql Db migration resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either DatabaseMigrationSqlDb or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb] @@ -452,7 +415,7 @@ def begin_create_or_update( resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlDb, IO], + parameters: Union[_models.DatabaseMigrationSqlDb, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.DatabaseMigrationSqlDb]: """Create or Update Database Migration resource. @@ -464,20 +427,9 @@ def begin_create_or_update( :type sql_db_instance_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Details of Sql Db migration resource. Is either a model type or a IO type. - Required. - :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of Sql Db migration resource. Is either a DatabaseMigrationSqlDb + type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb or IO[bytes] :return: An instance of LROPoller that returns either DatabaseMigrationSqlDb or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlDb] @@ -486,9 +438,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DatabaseMigrationSqlDb] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -507,12 +457,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlDb", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -522,27 +473,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.DatabaseMigrationSqlDb].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return LROPoller[_models.DatabaseMigrationSqlDb]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, sql_db_instance_name: str, target_db_name: str, force: Optional[bool] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -553,41 +502,43 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, subscription_id=self._config.subscription_id, force=force, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + 
_request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -610,14 +561,6 @@ def begin_delete( :param force: Optional force delete boolean. If this is provided as true, migration will be deleted even if active. Default value is None. :type force: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -625,15 +568,13 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -644,11 +585,12 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -657,27 +599,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - 
begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _cancel_initial( # pylint: disable=inconsistent-return-statements + def _cancel_initial( self, resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -688,21 +626,19 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - request = build_cancel_request( + _request = build_cancel_request( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -711,29 +647,33 @@ def _cancel_initial( # 
pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cancel_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cancel_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return deserialized # type: ignore @overload def begin_cancel( @@ -761,14 +701,6 @@ def begin_cancel( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -780,7 +712,7 @@ def begin_cancel( resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -796,18 +728,10 @@ def begin_cancel( :type target_db_name: str :param parameters: Required migration operation ID for which cancel will be initiated. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -819,7 +743,7 @@ def begin_cancel( resource_group_name: str, sql_db_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> LROPoller[None]: """Stop on going migration for the database. @@ -832,19 +756,8 @@ def begin_cancel( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required migration operation ID for which cancel will be initiated. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -852,16 +765,14 @@ def begin_cancel( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._cancel_initial( # type: ignore + raw_result = self._cancel_initial( resource_group_name=resource_group_name, sql_db_instance_name=sql_db_instance_name, target_db_name=target_db_name, @@ -873,11 +784,12 @@ def begin_cancel( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -886,14 +798,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, 
continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{sqlDbInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_mi_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_mi_operations.py index 165b7c702c25..15cda7cbf4e1 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_mi_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_mi_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,12 +16,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -28,12 +30,11 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -54,9 +55,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -71,7 +70,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters if migration_operation_id is not None: @@ -92,9 +91,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -110,7 +107,7 @@ def build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -129,9 +126,7 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # Construct URL _url = kwargs.pop( @@ -145,7 +140,7 @@ def build_cancel_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = 
_format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -163,9 +158,7 @@ def build_cutover_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # Construct URL _url = kwargs.pop( @@ -179,7 +172,7 @@ def build_cutover_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -235,12 +228,11 @@ def get( :type migration_operation_id: str :param expand: Complete migration details be included in the response. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DatabaseMigrationSqlMi or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -251,12 +243,10 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -264,15 +254,14 @@ def get( migration_operation_id=migration_operation_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -281,26 +270,22 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlMi", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return deserialized # type: ignore def _create_or_update_initial( self, resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlMi, IO], + parameters: Union[_models.DatabaseMigrationSqlMi, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseMigrationSqlMi: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -311,21 +296,19 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DatabaseMigrationSqlMi") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, 
managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -334,38 +317,34 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } - @overload def begin_create_or_update( self, @@ -391,14 +370,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either DatabaseMigrationSqlMi or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi] @@ -411,7 +382,7 @@ def begin_create_or_update( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -426,18 +397,10 @@ def begin_create_or_update( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either DatabaseMigrationSqlMi or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi] @@ -450,7 +413,7 @@ def begin_create_or_update( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlMi, IO], + parameters: Union[_models.DatabaseMigrationSqlMi, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.DatabaseMigrationSqlMi]: """Create a new database migration to a given SQL Managed Instance. @@ -462,20 +425,9 @@ def begin_create_or_update( :type managed_instance_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of SqlMigrationService resource. Is either a DatabaseMigrationSqlMi + type or a IO[bytes] type. 
Required. + :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi or IO[bytes] :return: An instance of LROPoller that returns either DatabaseMigrationSqlMi or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlMi] @@ -484,9 +436,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DatabaseMigrationSqlMi] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -505,12 +455,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlMi", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -520,27 +471,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.DatabaseMigrationSqlMi].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - 
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return LROPoller[_models.DatabaseMigrationSqlMi]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - def _cancel_initial( # pylint: disable=inconsistent-return-statements + def _cancel_initial( self, resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -551,21 +500,19 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - request = build_cancel_request( + _request = build_cancel_request( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -574,29 +521,33 @@ def 
_cancel_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cancel_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cancel_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return deserialized # type: ignore @overload def begin_cancel( @@ -624,14 +575,6 @@ def begin_cancel( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -643,7 +586,7 @@ def begin_cancel( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -659,18 +602,10 @@ def begin_cancel( :type target_db_name: str :param parameters: Required migration operation ID for which cancel will be initiated. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -682,7 +617,7 @@ def begin_cancel( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> LROPoller[None]: """Stop in-progress database migration to SQL Managed Instance. @@ -695,19 +630,8 @@ def begin_cancel( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required migration operation ID for which cancel will be initiated. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -715,16 +639,14 @@ def begin_cancel( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._cancel_initial( # type: ignore + raw_result = self._cancel_initial( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -736,11 +658,12 @@ def begin_cancel( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -749,27 +672,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, 
continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _cutover_initial( # pylint: disable=inconsistent-return-statements + def _cutover_initial( self, resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -780,21 +699,19 @@ def _cutover_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - 
request = build_cutover_request( + _request = build_cutover_request( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -803,29 +720,33 @@ def _cutover_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cutover_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cutover_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover" - } + return deserialized # type: ignore @overload def begin_cutover( @@ -853,14 +774,6 @@ def begin_cutover( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -872,7 +785,7 @@ def begin_cutover( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -888,18 +801,10 @@ def begin_cutover( :type target_db_name: str :param parameters: Required migration operation ID for which cutover will be initiated. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -911,7 +816,7 @@ def begin_cutover( resource_group_name: str, managed_instance_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> LROPoller[None]: """Initiate cutover for in-progress online database migration to SQL Managed Instance. @@ -924,19 +829,8 @@ def begin_cutover( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required migration operation ID for which cutover will be initiated. Is - either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -944,16 +838,14 @@ def begin_cutover( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._cutover_initial( # type: ignore + raw_result = self._cutover_initial( resource_group_name=resource_group_name, managed_instance_name=managed_instance_name, target_db_name=target_db_name, @@ -965,11 +857,12 @@ def begin_cutover( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -978,14 +871,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, 
continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cutover.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_vm_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_vm_operations.py index 73871d141414..41dbd30231f7 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_vm_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_database_migrations_sql_vm_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,12 +16,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -28,12 +30,11 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -54,9 +55,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -71,7 +70,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters if migration_operation_id is not None: @@ -92,9 +91,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -110,7 +107,7 @@ def build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -129,9 +126,7 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # Construct URL _url = kwargs.pop( @@ -145,7 +140,7 @@ def build_cancel_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = 
_format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -163,9 +158,7 @@ def build_cutover_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # Construct URL _url = kwargs.pop( @@ -179,7 +172,7 @@ def build_cutover_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -235,12 +228,11 @@ def get( :type migration_operation_id: str :param expand: Complete migration details be included in the response. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DatabaseMigrationSqlVm or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -251,12 +243,10 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -264,15 +254,14 @@ def get( migration_operation_id=migration_operation_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -281,26 +270,22 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlVm", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return deserialized # type: ignore def _create_or_update_initial( self, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlVm, IO], + parameters: Union[_models.DatabaseMigrationSqlVm, IO[bytes]], **kwargs: Any - ) -> _models.DatabaseMigrationSqlVm: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -311,21 +296,19 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DatabaseMigrationSqlVm") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, 
sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -334,38 +317,34 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } - @overload def begin_create_or_update( self, @@ -391,14 +370,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either DatabaseMigrationSqlVm or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm] @@ -411,7 +382,7 @@ def begin_create_or_update( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -426,18 +397,10 @@ def begin_create_or_update( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either DatabaseMigrationSqlVm or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm] @@ -450,7 +413,7 @@ def begin_create_or_update( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.DatabaseMigrationSqlVm, IO], + parameters: Union[_models.DatabaseMigrationSqlVm, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.DatabaseMigrationSqlVm]: """Create a new database migration to a given SQL VM. @@ -462,20 +425,9 @@ def begin_create_or_update( :type sql_virtual_machine_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of SqlMigrationService resource. Is either a DatabaseMigrationSqlVm + type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm or IO[bytes] :return: An instance of LROPoller that returns either DatabaseMigrationSqlVm or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DatabaseMigrationSqlVm] @@ -484,9 +436,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DatabaseMigrationSqlVm] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -505,12 +455,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response) + deserialized = self._deserialize("DatabaseMigrationSqlVm", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -520,27 +471,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.DatabaseMigrationSqlVm].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}" - } + return LROPoller[_models.DatabaseMigrationSqlVm]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - def _cancel_initial( # pylint: disable=inconsistent-return-statements + def _cancel_initial( self, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -551,21 +500,19 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "MigrationOperationInput") - request = build_cancel_request( + _request = build_cancel_request( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -574,29 
+521,33 @@ def _cancel_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cancel_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cancel_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return deserialized # type: ignore @overload def begin_cancel( @@ -623,14 +574,6 @@ def begin_cancel( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -642,7 +585,7 @@ def begin_cancel( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -657,18 +600,10 @@ def begin_cancel( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -680,7 +615,7 @@ def begin_cancel( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> LROPoller[None]: """Stop in-progress database migration to SQL VM. @@ -692,19 +627,8 @@ def begin_cancel( :type sql_virtual_machine_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Is either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -712,16 +636,14 @@ def begin_cancel( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._cancel_initial( # type: ignore + raw_result = self._cancel_initial( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -733,11 +655,12 @@ def begin_cancel( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -746,27 +669,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, 
continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cancel" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _cutover_initial( # pylint: disable=inconsistent-return-statements + def _cutover_initial( self, resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -777,21 +696,19 @@ def _cutover_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, 
"MigrationOperationInput") - request = build_cutover_request( + _request = build_cutover_request( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -800,29 +717,33 @@ def _cutover_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._cutover_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _cutover_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover" - } + return deserialized # type: ignore @overload def begin_cutover( @@ -849,14 +770,6 @@ def begin_cutover( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -868,7 +781,7 @@ def begin_cutover( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -883,18 +796,10 @@ def begin_cutover( :param target_db_name: The name of the target database. Required. :type target_db_name: str :param parameters: Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -906,7 +811,7 @@ def begin_cutover( resource_group_name: str, sql_virtual_machine_name: str, target_db_name: str, - parameters: Union[_models.MigrationOperationInput, IO], + parameters: Union[_models.MigrationOperationInput, IO[bytes]], **kwargs: Any ) -> LROPoller[None]: """Initiate cutover for in-progress online database migration to SQL VM. @@ -918,19 +823,8 @@ def begin_cutover( :type sql_virtual_machine_name: str :param target_db_name: The name of the target database. Required. :type target_db_name: str - :param parameters: Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Is either a MigrationOperationInput type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationOperationInput or IO[bytes] :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -938,16 +832,14 @@ def begin_cutover( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._cutover_initial( # type: ignore + raw_result = self._cutover_initial( resource_group_name=resource_group_name, sql_virtual_machine_name=sql_virtual_machine_name, target_db_name=target_db_name, @@ -959,11 +851,12 @@ def begin_cutover( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -972,14 +865,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, 
continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_cutover.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{sqlVirtualMachineName}/providers/Microsoft.DataMigration/databaseMigrations/{targetDbName}/cutover" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_files_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_files_operations.py index 4f5b912e5f55..35ef8b15b3ec 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_files_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_files_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload import urllib.parse from azure.core.exceptions import ( @@ -20,20 +21,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,9 +46,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -64,7 +61,7 @@ def build_list_request( "projectName": _SERIALIZER.url("project_name", project_name, "str"), } - _url: str 
= _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -81,9 +78,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -99,7 +94,7 @@ def build_get_request( "fileName": _SERIALIZER.url("file_name", file_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -116,9 +111,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -135,7 +128,7 @@ def build_create_or_update_request( "fileName": _SERIALIZER.url("file_name", file_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -154,9 +147,7 @@ def 
build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -172,7 +163,7 @@ def build_delete_request( "fileName": _SERIALIZER.url("file_name", file_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -189,9 +180,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -208,7 +197,7 @@ def build_update_request( "fileName": _SERIALIZER.url("file_name", file_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -227,9 +216,7 @@ def build_read_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", 
"2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -245,7 +232,7 @@ def build_read_request( "fileName": _SERIALIZER.url("file_name", file_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -262,9 +249,7 @@ def build_read_write_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -280,7 +265,7 @@ def build_read_write_request( "fileName": _SERIALIZER.url("file_name", file_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -325,7 +310,6 @@ def list( :type service_name: str :param project_name: Name of the project. Required. 
:type project_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ProjectFile or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.ProjectFile] :raises ~azure.core.exceptions.HttpResponseError: @@ -333,12 +317,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FileList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -349,18 +331,16 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( group_name=group_name, service_name=service_name, project_name=project_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -372,13 +352,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" 
+ return _request def extract_data(pipeline_response): deserialized = self._deserialize("FileList", pipeline_response) @@ -388,10 +367,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -404,10 +384,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files" - } - @distributed_trace def get( self, group_name: str, service_name: str, project_name: str, file_name: str, **kwargs: Any @@ -425,12 +401,11 @@ def get( :type project_name: str :param file_name: Name of the File. Required. 
:type file_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -441,27 +416,24 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, service_name=service_name, project_name=project_name, file_name=file_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -471,16 +443,12 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectFile", pipeline_response) + deserialized = self._deserialize("ProjectFile", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return 
cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}" - } + return deserialized # type: ignore @overload def create_or_update( @@ -511,7 +479,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: @@ -524,7 +491,7 @@ def create_or_update( service_name: str, project_name: str, file_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -542,11 +509,10 @@ def create_or_update( :param file_name: Name of the File. Required. :type file_name: str :param parameters: Information about the file. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: @@ -559,7 +525,7 @@ def create_or_update( service_name: str, project_name: str, file_name: str, - parameters: Union[_models.ProjectFile, IO], + parameters: Union[_models.ProjectFile, IO[bytes]], **kwargs: Any ) -> _models.ProjectFile: """Create a file resource. @@ -574,17 +540,14 @@ def create_or_update( :type project_name: str :param file_name: Name of the File. Required. 
:type file_name: str - :param parameters: Information about the file. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the file. Is either a ProjectFile type or a IO[bytes] + type. Required. + :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO[bytes] :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -595,21 +558,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectFile") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -619,15 +580,14 @@ def create_or_update( content_type=content_type, json=_json, 
content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -637,21 +597,13 @@ def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ProjectFile", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("ProjectFile", pipeline_response) + deserialized = self._deserialize("ProjectFile", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}" - } - @distributed_trace def delete( # pylint: disable=inconsistent-return-statements self, group_name: str, service_name: str, project_name: str, file_name: str, **kwargs: Any @@ -668,12 +620,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type project_name: str :param file_name: Name of the File. Required. 
:type file_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -684,27 +635,24 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, project_name=project_name, file_name=file_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -715,11 +663,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}" - } + return cls(pipeline_response, 
None, {}) # type: ignore @overload def update( @@ -750,7 +694,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: @@ -763,7 +706,7 @@ def update( service_name: str, project_name: str, file_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -781,11 +724,10 @@ def update( :param file_name: Name of the File. Required. :type file_name: str :param parameters: Information about the file. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: @@ -798,7 +740,7 @@ def update( service_name: str, project_name: str, file_name: str, - parameters: Union[_models.ProjectFile, IO], + parameters: Union[_models.ProjectFile, IO[bytes]], **kwargs: Any ) -> _models.ProjectFile: """Update a file. @@ -813,17 +755,14 @@ def update( :type project_name: str :param file_name: Name of the File. Required. :type file_name: str - :param parameters: Information about the file. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the file. Is either a ProjectFile type or a IO[bytes] + type. Required. + :type parameters: ~azure.mgmt.datamigration.models.ProjectFile or IO[bytes] :return: ProjectFile or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectFile :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -834,21 +773,19 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectFile] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectFile") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -858,15 +795,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -876,16 +812,12 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectFile", pipeline_response) + deserialized = self._deserialize("ProjectFile", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}" - } + return deserialized # type: ignore @distributed_trace def read( @@ -904,12 +836,11 @@ def read( :type project_name: str :param file_name: Name of the File. Required. 
:type file_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: FileStorageInfo or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.FileStorageInfo :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -920,27 +851,24 @@ def read( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FileStorageInfo] = kwargs.pop("cls", None) - request = build_read_request( + _request = build_read_request( group_name=group_name, service_name=service_name, project_name=project_name, file_name=file_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.read.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -950,16 +878,12 @@ def read( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("FileStorageInfo", pipeline_response) + deserialized = self._deserialize("FileStorageInfo", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - 
return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - read.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}/read" - } + return deserialized # type: ignore @distributed_trace def read_write( @@ -977,12 +901,11 @@ def read_write( :type project_name: str :param file_name: Name of the File. Required. :type file_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: FileStorageInfo or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.FileStorageInfo :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -993,27 +916,24 @@ def read_write( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FileStorageInfo] = kwargs.pop("cls", None) - request = build_read_write_request( + _request = build_read_write_request( group_name=group_name, service_name=service_name, project_name=project_name, file_name=file_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.read_write.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, 
stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1023,13 +943,9 @@ def read_write( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("FileStorageInfo", pipeline_response) + deserialized = self._deserialize("FileStorageInfo", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - read_write.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/files/{fileName}/readwrite" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_migration_services_operations.py new file mode 100644 index 000000000000..40be79e61129 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_migration_services_operations.py @@ -0,0 +1,1080 @@ +# pylint: disable=too-many-lines,too-many-statements +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._serialization import Serializer + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}", + ) # 
pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "migrationServiceName": _SERIALIZER.url( + "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "migrationServiceName": _SERIALIZER.url( + "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "migrationServiceName": _SERIALIZER.url( + "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) 
or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "migrationServiceName": _SERIALIZER.url( + "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/migrationServices" + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_migrations_request( + resource_group_name: str, migration_service_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + 
"template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/migrationServices/{migrationServiceName}/listMigrations", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "migrationServiceName": _SERIALIZER.url( + "migration_service_name", migration_service_name, "str", pattern=r"^[A-Za-z][A-Za-z0-9_-]*$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class MigrationServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.datamigration.DataMigrationManagementClient`'s + :attr:`migration_services` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, migration_service_name: str, **kwargs: Any) -> _models.MigrationService: + """Retrieve the Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. 
+ :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :return: MigrationService or the result of cls(response) + :rtype: ~azure.mgmt.datamigration.models.MigrationService + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None) + + _request = build_get_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("MigrationService", pipeline_response.http_response) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + migration_service_name: str, + parameters: 
Union[_models.MigrationService, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "MigrationService") + + _request = build_create_or_update_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: _models.MigrationService, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MigrationService]: + """Create or Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Required. + :type parameters: ~azure.mgmt.datamigration.models.MigrationService + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MigrationService]: + """Create or Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. 
+ :type migration_service_name: str + :param parameters: Details of MigrationService resource. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: Union[_models.MigrationService, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.MigrationService]: + """Create or Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Is either a MigrationService type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.MigrationService or IO[bytes] + :return: An instance of LROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("MigrationService", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.MigrationService].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.MigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, migration_service_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_delete_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, migration_service_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif 
polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + def _update_initial( + self, + resource_group_name: str, + migration_service_name: str, + parameters: Union[_models.MigrationServiceUpdate, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "MigrationServiceUpdate") + + _request = build_update_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: _models.MigrationServiceUpdate, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MigrationService]: + """Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Required. + :type parameters: ~azure.mgmt.datamigration.models.MigrationServiceUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of LROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.MigrationService]: + """Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. + :type migration_service_name: str + :param parameters: Details of MigrationService resource. Required. + :type parameters: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + migration_service_name: str, + parameters: Union[_models.MigrationServiceUpdate, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.MigrationService]: + """Update Database Migration Service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. 
+ :type migration_service_name: str + :param parameters: Details of MigrationService resource. Is either a MigrationServiceUpdate + type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.MigrationServiceUpdate or IO[bytes] + :return: An instance of LROPoller that returns either MigrationService or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.MigrationService] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("MigrationService", pipeline_response.http_response) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + 
return LROPoller[_models.MigrationService].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.MigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.MigrationService"]: + """Retrieve all migration services in the resource group. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :return: An iterator like instance of either MigrationService or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.MigrationServiceListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + 
_next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + def extract_data(pipeline_response): + deserialized = self._deserialize("MigrationServiceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.MigrationService"]: + """Retrieve all migration services in the subscriptions. 
+ + :return: An iterator like instance of either MigrationService or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.MigrationService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.MigrationServiceListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + def extract_data(pipeline_response): + deserialized = self._deserialize("MigrationServiceListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = 
prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_migrations( + self, resource_group_name: str, migration_service_name: str, **kwargs: Any + ) -> Iterable["_models.DatabaseMigrationBase"]: + """Retrieve the List of database migrations attached to the service. + + :param resource_group_name: Name of the resource group that contains the resource. You can + obtain this value from the Azure Resource Manager API or the portal. Required. + :type resource_group_name: str + :param migration_service_name: Name of the Migration Service. Required. 
+ :type migration_service_name: str + :return: An iterator like instance of either DatabaseMigrationBase or the result of + cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DatabaseMigrationBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatabaseMigrationBaseListResult] = kwargs.pop("cls", None) + + error_map: MutableMapping[int, Type[HttpResponseError]] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_list_migrations_request( + resource_group_name=resource_group_name, + migration_service_name=migration_service_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DatabaseMigrationBaseListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = 
cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_operations.py index f30e8f2f8534..69376cd6d747 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar import urllib.parse from azure.core.exceptions import ( @@ -20,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,9 +43,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -85,7 +81,6 @@ def __init__(self, *args, **kwargs): def list(self, **kwargs: Any) -> Iterable["_models.OperationsDefinition"]: """Lists all of the available SQL Migration REST 
API operations. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationsDefinition or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.OperationsDefinition] @@ -94,12 +89,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.OperationsDefinition"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -110,14 +103,12 @@ def list(self, **kwargs: Any) -> Iterable["_models.OperationsDefinition"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -129,13 +120,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def 
extract_data(pipeline_response): deserialized = self._deserialize("OperationListResult", pipeline_response) @@ -145,10 +135,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -159,5 +150,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list.metadata = {"url": "/providers/Microsoft.DataMigration/operations"} diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_projects_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_projects_operations.py index d52fdf5ad40f..30f81a3f2557 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_projects_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_projects_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload import urllib.parse from azure.core.exceptions import ( @@ -20,20 +21,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,9 +44,7 @@ def build_list_request(group_name: str, service_name: str, subscription_id: str, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -61,7 +58,7 @@ def build_list_request(group_name: str, 
service_name: str, subscription_id: str, "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -78,9 +75,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -96,7 +91,7 @@ def build_create_or_update_request( "projectName": _SERIALIZER.url("project_name", project_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -115,9 +110,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -132,7 +125,7 @@ def build_get_request( "projectName": _SERIALIZER.url("project_name", project_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # 
type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -155,9 +148,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -172,7 +163,7 @@ def build_delete_request( "projectName": _SERIALIZER.url("project_name", project_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -191,9 +182,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -209,7 +198,7 @@ def build_update_request( "projectName": _SERIALIZER.url("project_name", project_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -252,7 +241,6 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> Iterable["_ :type group_name: str :param 
service_name: Name of the service. Required. :type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Project or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.Project] :raises ~azure.core.exceptions.HttpResponseError: @@ -260,12 +248,10 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> Iterable["_ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -276,17 +262,15 @@ def list(self, group_name: str, service_name: str, **kwargs: Any) -> Iterable["_ def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -298,13 +282,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("ProjectList", pipeline_response) @@ -314,10 +297,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -330,10 +314,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects" - } - @overload def create_or_update( self, @@ -361,7 +341,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: @@ -373,7 +352,7 @@ def create_or_update( group_name: str, service_name: str, project_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -390,11 +369,10 @@ def create_or_update( :param project_name: Name of the project. Required. :type project_name: str :param parameters: Information about the project. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: @@ -406,7 +384,7 @@ def create_or_update( group_name: str, service_name: str, project_name: str, - parameters: Union[_models.Project, IO], + parameters: Union[_models.Project, IO[bytes]], **kwargs: Any ) -> _models.Project: """Create or update project. @@ -420,18 +398,14 @@ def create_or_update( :type service_name: str :param project_name: Name of the project. Required. :type project_name: str - :param parameters: Information about the project. Is either a model type or a IO type. + :param parameters: Information about the project. Is either a Project type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.datamigration.models.Project or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :type parameters: ~azure.mgmt.datamigration.models.Project or IO[bytes] :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -442,21 +416,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Project] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Project") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -465,15 +437,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, 
**kwargs ) response = pipeline_response.http_response @@ -483,21 +454,13 @@ def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("Project", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("Project", pipeline_response) + deserialized = self._deserialize("Project", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}" - } - @distributed_trace def get(self, group_name: str, service_name: str, project_name: str, **kwargs: Any) -> _models.Project: """Get project information. @@ -511,12 +474,11 @@ def get(self, group_name: str, service_name: str, project_name: str, **kwargs: A :type service_name: str :param project_name: Name of the project. Required. 
:type project_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -527,26 +489,23 @@ def get(self, group_name: str, service_name: str, project_name: str, **kwargs: A _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Project] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, service_name=service_name, project_name=project_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -556,16 +515,12 @@ def get(self, group_name: str, service_name: str, project_name: str, **kwargs: A error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Project", pipeline_response) + deserialized = self._deserialize("Project", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -590,12 +545,11 @@ def delete( # pylint: disable=inconsistent-return-statements :param delete_running_tasks: Delete the resource even if it contains running tasks. Default value is None. :type delete_running_tasks: bool - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -606,27 +560,24 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, project_name=project_name, subscription_id=self._config.subscription_id, delete_running_tasks=delete_running_tasks, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = 
self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -637,11 +588,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}" - } + return cls(pipeline_response, None, {}) # type: ignore @overload def update( @@ -670,7 +617,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: @@ -682,7 +628,7 @@ def update( group_name: str, service_name: str, project_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -699,11 +645,10 @@ def update( :param project_name: Name of the project. Required. :type project_name: str :param parameters: Information about the project. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: @@ -715,7 +660,7 @@ def update( group_name: str, service_name: str, project_name: str, - parameters: Union[_models.Project, IO], + parameters: Union[_models.Project, IO[bytes]], **kwargs: Any ) -> _models.Project: """Update project. @@ -729,18 +674,14 @@ def update( :type service_name: str :param project_name: Name of the project. Required. :type project_name: str - :param parameters: Information about the project. Is either a model type or a IO type. + :param parameters: Information about the project. Is either a Project type or a IO[bytes] type. Required. - :type parameters: ~azure.mgmt.datamigration.models.Project or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :type parameters: ~azure.mgmt.datamigration.models.Project or IO[bytes] :return: Project or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.Project :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -751,21 +692,19 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Project] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "Project") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -774,15 +713,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -792,13 +730,9 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Project", pipeline_response) + deserialized = self._deserialize("Project", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_resource_skus_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_resource_skus_operations.py index 83384702f205..1b3b38f19052 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_resource_skus_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_resource_skus_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar import urllib.parse from azure.core.exceptions import ( @@ -20,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,9 +43,7 @@ def build_list_skus_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -56,7 +52,7 @@ def build_list_skus_request(subscription_id: str, **kwargs: Any) -> HttpRequest: "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -90,9 +86,8 @@ def __init__(self, *args, **kwargs): def list_skus(self, **kwargs: Any) -> Iterable["_models.ResourceSku"]: """Get supported SKUs. - The skus action returns the list of SKUs that DMS supports. + The skus action returns the list of SKUs that DMS (classic) supports. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ResourceSku or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.ResourceSku] :raises ~azure.core.exceptions.HttpResponseError: @@ -100,12 +95,10 @@ def list_skus(self, **kwargs: Any) -> Iterable["_models.ResourceSku"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ResourceSkusResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -116,15 +109,13 @@ def list_skus(self, **kwargs: Any) -> Iterable["_models.ResourceSku"]: def prepare_request(next_link=None): if not next_link: - request = build_list_skus_request( + _request = build_list_skus_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_skus.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -136,13 +127,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("ResourceSkusResult", pipeline_response) @@ -152,10 +142,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -167,5 +158,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_skus.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/skus"} diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_service_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_service_tasks_operations.py index 7a7fc9fdad40..03bd18f85314 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_service_tasks_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_service_tasks_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines 
+# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload import urllib.parse from azure.core.exceptions import ( @@ -20,20 +21,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,9 +46,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -63,7 +60,7 @@ def build_list_request( "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -82,9 +79,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", 
"application/json") @@ -100,7 +95,7 @@ def build_create_or_update_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -125,9 +120,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -142,7 +135,7 @@ def build_get_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -167,9 +160,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -184,7 +175,7 @@ def build_delete_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") @@ -203,9 +194,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -221,7 +210,7 @@ def build_update_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -240,9 +229,7 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -257,7 +244,7 @@ def build_cancel_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -293,10 +280,10 @@ def list( ) -> Iterable["_models.ProjectTask"]: """Get service level tasks for a service. - The services resource is the top-level resource that represents the Database Migration Service. 
- This method returns a list of service level tasks owned by a service resource. Some tasks may - have a status of Unknown, which indicates that an error occurred while querying the status of - that task. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This method returns a list of service level tasks owned by a service + resource. Some tasks may have a status of Unknown, which indicates that an error occurred while + querying the status of that task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -304,7 +291,6 @@ def list( :type service_name: str :param task_type: Filter tasks by task type. Default value is None. :type task_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ProjectTask or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.ProjectTask] :raises ~azure.core.exceptions.HttpResponseError: @@ -312,12 +298,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TaskList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -328,18 +312,16 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, task_type=task_type, api_version=api_version, - template_url=self.list.metadata["url"], 
headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -351,13 +333,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("TaskList", pipeline_response) @@ -367,10 +348,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -383,10 +365,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks" - } - @overload def create_or_update( self, @@ -401,9 +379,9 @@ def create_or_update( """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PUT method creates a new service task or updates an existing one, although - since service tasks have no mutable custom properties, there is little reason to update an - existing one. 
+ DMS (classic) instance. The PUT method creates a new service task or updates an existing one, + although since service tasks have no mutable custom properties, there is little reason to + update an existing one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -416,7 +394,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -428,7 +405,7 @@ def create_or_update( group_name: str, service_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -436,9 +413,9 @@ def create_or_update( """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PUT method creates a new service task or updates an existing one, although - since service tasks have no mutable custom properties, there is little reason to update an - existing one. + DMS (classic) instance. The PUT method creates a new service task or updates an existing one, + although since service tasks have no mutable custom properties, there is little reason to + update an existing one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -447,11 +424,10 @@ def create_or_update( :param task_name: Name of the Task. Required. :type task_name: str :param parameters: Information about the task. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -463,15 +439,15 @@ def create_or_update( group_name: str, service_name: str, task_name: str, - parameters: Union[_models.ProjectTask, IO], + parameters: Union[_models.ProjectTask, IO[bytes]], **kwargs: Any ) -> _models.ProjectTask: """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PUT method creates a new service task or updates an existing one, although - since service tasks have no mutable custom properties, there is little reason to update an - existing one. + DMS (classic) instance. The PUT method creates a new service task or updates an existing one, + although since service tasks have no mutable custom properties, there is little reason to + update an existing one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -479,17 +455,14 @@ def create_or_update( :type service_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Information about the task. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes] + type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes] :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -500,21 +473,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectTask") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, service_name=service_name, task_name=task_name, @@ -523,15 +494,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -541,21 +511,13 @@ def create_or_update( error = 
self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ProjectTask", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}" - } - @distributed_trace def get( self, group_name: str, service_name: str, task_name: str, expand: Optional[str] = None, **kwargs: Any @@ -563,7 +525,7 @@ def get( """Get service task information. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The GET method retrieves information about a service task. + DMS (classic) instance. The GET method retrieves information about a service task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -573,12 +535,11 @@ def get( :type task_name: str :param expand: Expand the response. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -589,27 +550,24 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, service_name=service_name, task_name=task_name, subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -619,16 +577,12 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return 
cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -642,7 +596,8 @@ def delete( # pylint: disable=inconsistent-return-statements """Delete service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The DELETE method deletes a service task, canceling it first if it's running. + DMS (classic) instance. The DELETE method deletes a service task, canceling it first if it's + running. :param group_name: Name of the resource group. Required. :type group_name: str @@ -653,12 +608,11 @@ def delete( # pylint: disable=inconsistent-return-statements :param delete_running_tasks: Delete the resource even if it contains running tasks. Default value is None. 
:type delete_running_tasks: bool - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -669,27 +623,24 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, task_name=task_name, subscription_id=self._config.subscription_id, delete_running_tasks=delete_running_tasks, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -700,11 +651,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}" - } + return 
cls(pipeline_response, None, {}) # type: ignore @overload def update( @@ -720,8 +667,8 @@ def update( """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PATCH method updates an existing service task, but since service tasks have - no mutable custom properties, there is little reason to do so. + DMS (classic) instance. The PATCH method updates an existing service task, but since service + tasks have no mutable custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -734,7 +681,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -746,7 +692,7 @@ def update( group_name: str, service_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -754,8 +700,8 @@ def update( """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PATCH method updates an existing service task, but since service tasks have - no mutable custom properties, there is little reason to do so. + DMS (classic) instance. The PATCH method updates an existing service task, but since service + tasks have no mutable custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -764,11 +710,10 @@ def update( :param task_name: Name of the Task. Required. :type task_name: str :param parameters: Information about the task. Required. 
- :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -780,14 +725,14 @@ def update( group_name: str, service_name: str, task_name: str, - parameters: Union[_models.ProjectTask, IO], + parameters: Union[_models.ProjectTask, IO[bytes]], **kwargs: Any ) -> _models.ProjectTask: """Create or update service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. The PATCH method updates an existing service task, but since service tasks have - no mutable custom properties, there is little reason to do so. + DMS (classic) instance. The PATCH method updates an existing service task, but since service + tasks have no mutable custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -795,17 +740,14 @@ def update( :type service_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Information about the task. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes] + type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes] :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -816,21 +758,19 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectTask") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, task_name=task_name, @@ -839,15 +779,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -857,23 +796,19 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ApiError, 
pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}" - } + return deserialized # type: ignore @distributed_trace def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: Any) -> _models.ProjectTask: """Cancel a service task. The service tasks resource is a nested, proxy-only resource representing work performed by a - DMS instance. This method cancels a service task if it's currently queued or running. + DMS (classic) instance. This method cancels a service task if it's currently queued or running. :param group_name: Name of the resource group. Required. :type group_name: str @@ -881,12 +816,11 @@ def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: A :type service_name: str :param task_name: Name of the Task. Required. 
:type task_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -897,26 +831,23 @@ def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: A _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) - request = build_cancel_request( + _request = build_cancel_request( group_name=group_name, service_name=service_name, task_name=task_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.cancel.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -926,13 +857,9 @@ def cancel(self, group_name: str, service_name: str, task_name: str, **kwargs: A error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = 
self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/serviceTasks/{taskName}/cancel" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_services_operations.py index baf122c6c3e6..388f48d70fac 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_services_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_services_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -16,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -30,12 +32,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,9 +50,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -66,7 +65,7 @@ def build_create_or_update_request( "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -83,9 +82,7 @@ def build_get_request(group_name: str, service_name: str, subscription_id: str, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -99,7 +96,7 @@ def build_get_request(group_name: str, service_name: str, subscription_id: str, "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -121,9 +118,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -137,7 +132,7 @@ def build_delete_request( "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -154,9 +149,7 @@ def build_update_request(group_name: str, service_name: str, subscription_id: st _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -171,7 +164,7 @@ def build_update_request(group_name: 
str, service_name: str, subscription_id: st "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -188,9 +181,7 @@ def build_check_status_request(group_name: str, service_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -204,7 +195,7 @@ def build_check_status_request(group_name: str, service_name: str, subscription_ "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -219,9 +210,7 @@ def build_start_request(group_name: str, service_name: str, subscription_id: str _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -235,7 +224,7 @@ def build_start_request(group_name: str, service_name: str, subscription_id: str "serviceName": _SERIALIZER.url("service_name", service_name, 
"str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -250,9 +239,7 @@ def build_stop_request(group_name: str, service_name: str, subscription_id: str, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -266,7 +253,7 @@ def build_stop_request(group_name: str, service_name: str, subscription_id: str, "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -281,9 +268,7 @@ def build_list_skus_request(group_name: str, service_name: str, subscription_id: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -297,7 +282,7 @@ def build_list_skus_request(group_name: str, service_name: str, subscription_id: "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = 
_url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -308,15 +293,13 @@ def build_list_skus_request(group_name: str, service_name: str, subscription_id: return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_check_children_name_availability_request( +def build_check_children_name_availability_request( # pylint: disable=name-too-long group_name: str, service_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -331,7 +314,7 @@ def build_check_children_name_availability_request( "serviceName": _SERIALIZER.url("service_name", service_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -348,9 +331,7 @@ def build_list_by_resource_group_request(group_name: str, subscription_id: str, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -363,7 
+344,7 @@ def build_list_by_resource_group_request(group_name: str, subscription_id: str, "groupName": _SERIALIZER.url("group_name", group_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -378,9 +359,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -389,7 +368,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -404,9 +383,7 @@ def build_check_name_availability_request(location: str, subscription_id: str, * _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -420,7 +397,7 @@ def 
build_check_name_availability_request(location: str, subscription_id: str, * "location": _SERIALIZER.url("location", location, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -453,9 +430,13 @@ def __init__(self, *args, **kwargs): self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") def _create_or_update_initial( - self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any - ) -> Optional[_models.DataMigrationService]: - error_map = { + self, + group_name: str, + service_name: str, + parameters: Union[_models.DataMigrationService, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -466,21 +447,19 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.DataMigrationService]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DataMigrationService") - request = build_create_or_update_request( + _request = 
build_create_or_update_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, @@ -488,39 +467,34 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("DataMigrationService", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return deserialized # type: ignore @overload def begin_create_or_update( @@ -532,16 +506,16 @@ def begin_create_or_update( content_type: str = 
"application/json", **kwargs: Any ) -> LROPoller[_models.DataMigrationService]: - """Create or update DMS Instance. + """Create or update DMS (classic) Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PUT method creates a new service or updates an existing one. When a service is updated, - existing child resources (i.e. tasks) are unaffected. Services currently support a single kind, - "vm", which refers to a VM-based service, although other kinds may be added in the future. This - method can change the kind, SKU, and network of the service, but if tasks are currently running - (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider - will reply when successful with 200 OK or 201 Created. Long-running operations use the - provisioningState property. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PUT method creates a new service or updates an existing one. When a + service is updated, existing child resources (i.e. tasks) are unaffected. Services currently + support a single kind, "vm", which refers to a VM-based service, although other kinds may be + added in the future. This method can change the kind, SKU, and network of the service, but if + tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request + ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created. + Long-running operations use the provisioningState property. :param group_name: Name of the resource group. Required. :type group_name: str @@ -552,14 +526,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either DataMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService] @@ -571,39 +537,31 @@ def begin_create_or_update( self, group_name: str, service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> LROPoller[_models.DataMigrationService]: - """Create or update DMS Instance. + """Create or update DMS (classic) Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PUT method creates a new service or updates an existing one. When a service is updated, - existing child resources (i.e. tasks) are unaffected. Services currently support a single kind, - "vm", which refers to a VM-based service, although other kinds may be added in the future. This - method can change the kind, SKU, and network of the service, but if tasks are currently running - (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider - will reply when successful with 200 OK or 201 Created. Long-running operations use the - provisioningState property. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). 
The PUT method creates a new service or updates an existing one. When a + service is updated, existing child resources (i.e. tasks) are unaffected. Services currently + support a single kind, "vm", which refers to a VM-based service, although other kinds may be + added in the future. This method can change the kind, SKU, and network of the service, but if + tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request + ("ServiceIsBusy"). The provider will reply when successful with 200 OK or 201 Created. + Long-running operations use the provisioningState property. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str :param parameters: Information about the service. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either DataMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService] @@ -612,37 +570,30 @@ def begin_create_or_update( @distributed_trace def begin_create_or_update( - self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any + self, + group_name: str, + service_name: str, + parameters: Union[_models.DataMigrationService, IO[bytes]], + **kwargs: Any ) -> LROPoller[_models.DataMigrationService]: - """Create or update DMS Instance. + """Create or update DMS (classic) Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PUT method creates a new service or updates an existing one. When a service is updated, - existing child resources (i.e. tasks) are unaffected. Services currently support a single kind, - "vm", which refers to a VM-based service, although other kinds may be added in the future. This - method can change the kind, SKU, and network of the service, but if tasks are currently running - (i.e. the service is busy), this will fail with 400 Bad Request ("ServiceIsBusy"). The provider - will reply when successful with 200 OK or 201 Created. Long-running operations use the - provisioningState property. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PUT method creates a new service or updates an existing one. When a + service is updated, existing child resources (i.e. tasks) are unaffected. Services currently + support a single kind, "vm", which refers to a VM-based service, although other kinds may be + added in the future. This method can change the kind, SKU, and network of the service, but if + tasks are currently running (i.e. the service is busy), this will fail with 400 Bad Request + ("ServiceIsBusy"). 
The provider will reply when successful with 200 OK or 201 Created. + Long-running operations use the provisioningState property. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :param parameters: Information about the service. Is either a model type or a IO type. - Required. - :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Information about the service. Is either a DataMigrationService type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO[bytes] :return: An instance of LROPoller that returns either DataMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService] @@ -651,9 +602,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -671,12 +620,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -686,35 +636,32 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.DataMigrationService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return LROPoller[_models.DataMigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace def get(self, group_name: str, service_name: str, **kwargs: Any) -> _models.DataMigrationService: - """Get DMS Service Instance. + """Get DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The GET method retrieves information about a service instance. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The GET method retrieves information about a service instance. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataMigrationService or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DataMigrationService :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -725,25 +672,22 @@ def get(self, group_name: str, service_name: str, **kwargs: Any) -> _models.Data _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, 
service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -753,21 +697,17 @@ def get(self, group_name: str, service_name: str, **kwargs: Any) -> _models.Data error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = self._deserialize("DataMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized + return deserialized # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } - - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, group_name: str, service_name: str, delete_running_tasks: Optional[bool] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -778,50 +718,52 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - 
"api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, delete_running_tasks=delete_running_tasks, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete( self, group_name: str, service_name: str, delete_running_tasks: Optional[bool] = None, **kwargs: 
Any ) -> LROPoller[None]: - """Delete DMS Service Instance. + """Delete DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The DELETE method deletes a service. Any running tasks will be canceled. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The DELETE method deletes a service. Any running tasks will be canceled. :param group_name: Name of the resource group. Required. :type group_name: str @@ -830,14 +772,6 @@ def begin_delete( :param delete_running_tasks: Delete the resource even if it contains running tasks. Default value is None. :type delete_running_tasks: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -845,15 +779,13 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( group_name=group_name, service_name=service_name, delete_running_tasks=delete_running_tasks, @@ -863,11 +795,12 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -876,22 +809,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore def _update_initial( - self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any - ) -> Optional[_models.DataMigrationService]: - error_map = { + self, + group_name: str, + service_name: str, + parameters: Union[_models.DataMigrationService, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -902,21 +835,19 @@ def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.DataMigrationService]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DataMigrationService") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, @@ -924,36 +855,34 @@ def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = 
_convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return deserialized # type: ignore @overload def begin_update( @@ -965,12 +894,12 @@ def begin_update( content_type: str = "application/json", **kwargs: Any ) -> LROPoller[_models.DataMigrationService]: - """Create or update DMS Service Instance. + """Create or update DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PATCH method updates an existing service. This method can change the kind, SKU, and network - of the service, but if tasks are currently running (i.e. 
the service is busy), this will fail - with 400 Bad Request ("ServiceIsBusy"). + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PATCH method updates an existing service. This method can change the + kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is + busy), this will fail with 400 Bad Request ("ServiceIsBusy"). :param group_name: Name of the resource group. Required. :type group_name: str @@ -981,14 +910,6 @@ def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either DataMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService] @@ -1000,35 +921,27 @@ def begin_update( self, group_name: str, service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> LROPoller[_models.DataMigrationService]: - """Create or update DMS Service Instance. + """Create or update DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PATCH method updates an existing service. 
This method can change the kind, SKU, and network - of the service, but if tasks are currently running (i.e. the service is busy), this will fail - with 400 Bad Request ("ServiceIsBusy"). + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PATCH method updates an existing service. This method can change the + kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is + busy), this will fail with 400 Bad Request ("ServiceIsBusy"). :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str :param parameters: Information about the service. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either DataMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService] @@ -1037,33 +950,26 @@ def begin_update( @distributed_trace def begin_update( - self, group_name: str, service_name: str, parameters: Union[_models.DataMigrationService, IO], **kwargs: Any + self, + group_name: str, + service_name: str, + parameters: Union[_models.DataMigrationService, IO[bytes]], + **kwargs: Any ) -> LROPoller[_models.DataMigrationService]: - """Create or update DMS Service Instance. + """Create or update DMS (classic) Service Instance. - The services resource is the top-level resource that represents the Database Migration Service. - The PATCH method updates an existing service. This method can change the kind, SKU, and network - of the service, but if tasks are currently running (i.e. the service is busy), this will fail - with 400 Bad Request ("ServiceIsBusy"). + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). The PATCH method updates an existing service. This method can change the + kind, SKU, and network of the service, but if tasks are currently running (i.e. the service is + busy), this will fail with 400 Bad Request ("ServiceIsBusy"). :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :param parameters: Information about the service. Is either a model type or a IO type. - Required. - :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Information about the service. Is either a DataMigrationService type or a + IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.DataMigrationService or IO[bytes] :return: An instance of LROPoller that returns either DataMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.DataMigrationService] @@ -1072,9 +978,7 @@ def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataMigrationService] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -1092,12 +996,13 @@ def begin_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataMigrationService", pipeline_response) + deserialized = self._deserialize("DataMigrationService", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -1107,17 +1012,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.DataMigrationService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}" - } + return LROPoller[_models.DataMigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace def check_status( @@ -1125,20 +1028,19 @@ def check_status( ) -> _models.DataMigrationServiceStatusResponse: """Check service health status. - The services resource is the top-level resource that represents the Database Migration Service. - This action performs a health check and returns the status of the service and virtual machine - size. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This action performs a health check and returns the status of the service + and virtual machine size. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. 
:type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DataMigrationServiceStatusResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DataMigrationServiceStatusResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1149,25 +1051,22 @@ def check_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataMigrationServiceStatusResponse] = kwargs.pop("cls", None) - request = build_check_status_request( + _request = build_check_status_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.check_status.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1177,21 +1076,15 @@ def check_status( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataMigrationServiceStatusResponse", pipeline_response) + deserialized = self._deserialize("DataMigrationServiceStatusResponse", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized + return deserialized # type: ignore - check_status.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkStatus" - } - - def _start_initial( # pylint: disable=inconsistent-return-statements - self, group_name: str, service_name: str, **kwargs: Any - ) -> None: - error_map = { + def _start_initial(self, group_name: str, service_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1202,60 +1095,55 @@ def _start_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_start_request( + _request = build_start_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._start_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response 
= pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _start_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start" - } + return deserialized # type: ignore @distributed_trace def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -> LROPoller[None]: """Start service. - The services resource is the top-level resource that represents the Database Migration Service. - This action starts the service and the service can be used for data migration. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This action starts the service and the service can be used for data + migration. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1263,15 +1151,13 @@ def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -> LROP _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._start_initial( # type: ignore + raw_result = self._start_initial( group_name=group_name, service_name=service_name, api_version=api_version, @@ -1280,11 +1166,12 @@ def begin_start(self, group_name: str, service_name: str, **kwargs: Any) -> LROP params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -1293,22 +1180,16 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return 
LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_start.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start" - } - - def _stop_initial( # pylint: disable=inconsistent-return-statements - self, group_name: str, service_name: str, **kwargs: Any - ) -> None: - error_map = { + def _stop_initial(self, group_name: str, service_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1319,61 +1200,55 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_stop_request( + _request = build_stop_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _stop_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop" - } + return deserialized # type: ignore @distributed_trace def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> LROPoller[None]: """Stop service. - The services resource is the top-level resource that represents the Database Migration Service. - This action stops the service and the service cannot be used for data migration. The service - owner won't be billed when the service is stopped. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This action stops the service and the service cannot be used for data + migration. The service owner won't be billed when the service is stopped. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. 
:type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1381,15 +1256,13 @@ def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> LROPo _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._stop_initial( # type: ignore + raw_result = self._stop_initial( group_name=group_name, service_name=service_name, api_version=api_version, @@ -1398,11 +1271,12 @@ def begin_stop(self, group_name: str, service_name: str, **kwargs: Any) -> LROPo params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return 
cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -1411,30 +1285,25 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_stop.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def list_skus(self, group_name: str, service_name: str, **kwargs: Any) -> Iterable["_models.AvailableServiceSku"]: """Get compatible SKUs. - The services resource is the top-level resource that represents the Database Migration Service. - The skus action returns the list of SKUs that a service resource can be updated to. + The services resource is the top-level resource that represents the Database Migration Service + (classic). The skus action returns the list of SKUs that a service resource can be updated to. :param group_name: Name of the resource group. Required. :type group_name: str :param service_name: Name of the service. Required. 
:type service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AvailableServiceSku or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.AvailableServiceSku] :raises ~azure.core.exceptions.HttpResponseError: @@ -1442,12 +1311,10 @@ def list_skus(self, group_name: str, service_name: str, **kwargs: Any) -> Iterab _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ServiceSkuList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1458,17 +1325,15 @@ def list_skus(self, group_name: str, service_name: str, **kwargs: Any) -> Iterab def prepare_request(next_link=None): if not next_link: - request = build_list_skus_request( + _request = build_list_skus_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_skus.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1480,13 +1345,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("ServiceSkuList", pipeline_response) @@ -1496,10 +1360,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1512,10 +1377,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_skus.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/skus" - } - @overload def check_children_name_availability( self, @@ -1539,7 +1400,6 @@ def check_children_name_availability( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -1550,7 +1410,7 @@ def check_children_name_availability( self, group_name: str, service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -1564,11 +1424,10 @@ def check_children_name_availability( :param service_name: Name of the service. Required. :type service_name: str :param parameters: Requested name to validate. Required. 
- :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -1576,7 +1435,11 @@ def check_children_name_availability( @distributed_trace def check_children_name_availability( - self, group_name: str, service_name: str, parameters: Union[_models.NameAvailabilityRequest, IO], **kwargs: Any + self, + group_name: str, + service_name: str, + parameters: Union[_models.NameAvailabilityRequest, IO[bytes]], + **kwargs: Any ) -> _models.NameAvailabilityResponse: """Check nested resource name validity and availability. @@ -1586,17 +1449,14 @@ def check_children_name_availability( :type group_name: str :param service_name: Name of the service. Required. :type service_name: str - :param parameters: Requested name to validate. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Requested name to validate. Is either a NameAvailabilityRequest type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO[bytes] :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1607,21 +1467,19 @@ def check_children_name_availability( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.NameAvailabilityResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "NameAvailabilityRequest") - request = build_check_children_name_availability_request( + _request = build_check_children_name_availability_request( group_name=group_name, service_name=service_name, subscription_id=self._config.subscription_id, @@ -1629,15 +1487,14 @@ def check_children_name_availability( content_type=content_type, json=_json, content=_content, - template_url=self.check_children_name_availability.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1647,27 +1504,22 @@ def check_children_name_availability( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response) + deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - check_children_name_availability.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkNameAvailability" - } + return deserialized # type: ignore @distributed_trace def list_by_resource_group(self, group_name: str, **kwargs: Any) -> Iterable["_models.DataMigrationService"]: """Get services in resource group. - The Services resource is the top-level resource that represents the Database Migration Service. - This method returns a list of service resources in a resource group. + The Services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This method returns a list of service resources in a resource group. :param group_name: Name of the resource group. Required. 
:type group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataMigrationService or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DataMigrationService] @@ -1676,12 +1528,10 @@ def list_by_resource_group(self, group_name: str, **kwargs: Any) -> Iterable["_m _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataMigrationServiceList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1692,16 +1542,14 @@ def list_by_resource_group(self, group_name: str, **kwargs: Any) -> Iterable["_m def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( group_name=group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1713,13 +1561,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("DataMigrationServiceList", pipeline_response) @@ -1729,10 +1576,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1745,18 +1593,13 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services" - } - @distributed_trace def list(self, **kwargs: Any) -> Iterable["_models.DataMigrationService"]: """Get services in subscription. - The services resource is the top-level resource that represents the Database Migration Service. - This method returns a list of service resources in a subscription. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This method returns a list of service resources in a subscription. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataMigrationService or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DataMigrationService] @@ -1765,12 +1608,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.DataMigrationService"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataMigrationServiceList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1781,15 +1622,13 @@ def list(self, **kwargs: Any) -> Iterable["_models.DataMigrationService"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1801,13 +1640,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + 
_request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("DataMigrationServiceList", pipeline_response) @@ -1817,10 +1655,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1833,8 +1672,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/services"} - @overload def check_name_availability( self, @@ -1855,7 +1692,6 @@ def check_name_availability( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -1863,7 +1699,7 @@ def check_name_availability( @overload def check_name_availability( - self, location: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any + self, location: str, parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.NameAvailabilityResponse: """Check name validity and availability. @@ -1872,11 +1708,10 @@ def check_name_availability( :param location: The Azure region of the operation. Required. :type location: str :param parameters: Requested name to validate. Required. 
- :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: @@ -1884,7 +1719,7 @@ def check_name_availability( @distributed_trace def check_name_availability( - self, location: str, parameters: Union[_models.NameAvailabilityRequest, IO], **kwargs: Any + self, location: str, parameters: Union[_models.NameAvailabilityRequest, IO[bytes]], **kwargs: Any ) -> _models.NameAvailabilityResponse: """Check name validity and availability. @@ -1892,17 +1727,14 @@ def check_name_availability( :param location: The Azure region of the operation. Required. :type location: str - :param parameters: Requested name to validate. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Requested name to validate. Is either a NameAvailabilityRequest type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.NameAvailabilityRequest or IO[bytes] :return: NameAvailabilityResponse or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.NameAvailabilityResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1913,36 +1745,33 @@ def check_name_availability( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.NameAvailabilityResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "NameAvailabilityRequest") - request = build_check_name_availability_request( + _request = build_check_name_availability_request( location=location, subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, content=_content, - template_url=self.check_name_availability.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -1952,13 +1781,9 @@ def check_name_availability( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response) + deserialized = self._deserialize("NameAvailabilityResponse", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - check_name_availability.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/checkNameAvailability" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_sql_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_sql_migration_services_operations.py index 9fe524f5df4d..27a1cafc9388 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_sql_migration_services_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_sql_migration_services_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -16,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -30,12 +32,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,9 +50,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -65,7 +64,7 @@ def build_get_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -82,9 +81,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", 
"application/json") @@ -99,7 +96,7 @@ def build_create_or_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -117,9 +114,7 @@ def build_delete_request( ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) # Construct URL _url = kwargs.pop( "template_url", @@ -131,7 +126,7 @@ def build_delete_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -145,9 +140,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -162,7 +155,7 @@ def build_update_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # 
Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -179,9 +172,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -194,7 +185,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -211,9 +202,7 @@ def build_list_auth_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -227,7 +216,7 @@ def build_list_auth_keys_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -244,9 +233,7 @@ def build_regenerate_auth_keys_request( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -261,7 +248,7 @@ def build_regenerate_auth_keys_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -280,9 +267,7 @@ def build_delete_node_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -297,7 +282,7 @@ def build_delete_node_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -316,9 +301,7 @@ def build_list_migrations_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - 
api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -332,7 +315,7 @@ def build_list_migrations_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -349,9 +332,7 @@ def build_list_monitoring_data_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -365,7 +346,7 @@ def build_list_monitoring_data_request( "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -380,9 +361,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -393,7 +372,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -434,12 +413,11 @@ def get( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SqlMigrationService or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.SqlMigrationService :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -450,25 +428,22 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = 
self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -477,25 +452,21 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return deserialized # type: ignore def _create_or_update_initial( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.SqlMigrationService, IO], + parameters: Union[_models.SqlMigrationService, IO[bytes]], **kwargs: Any - ) -> _models.SqlMigrationService: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -506,21 +477,19 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SqlMigrationService] = 
kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "SqlMigrationService") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, @@ -528,38 +497,34 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("SqlMigrationService", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - 
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } - @overload def begin_create_or_update( self, @@ -582,14 +547,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService] @@ -601,7 +558,7 @@ def begin_create_or_update( self, resource_group_name: str, sql_migration_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -614,18 +571,10 @@ def begin_create_or_update( :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService] @@ -637,7 +586,7 @@ def begin_create_or_update( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.SqlMigrationService, IO], + parameters: Union[_models.SqlMigrationService, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.SqlMigrationService]: """Create or Update Database Migration Service. @@ -647,20 +596,9 @@ def begin_create_or_update( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationService or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of SqlMigrationService resource. Is either a SqlMigrationService + type or a IO[bytes] type. Required. + :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationService or IO[bytes] :return: An instance of LROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService] @@ -669,9 +607,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -689,12 +625,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -704,22 +641,20 @@ def get_long_running_output(pipeline_response): else: 
polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.SqlMigrationService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return LROPoller[_models.SqlMigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -730,39 +665,41 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = 
_convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete(self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any) -> LROPoller[None]: @@ -773,14 +710,6 @@ def begin_delete(self, resource_group_name: str, sql_migration_service_name: str :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -788,15 +717,13 @@ def begin_delete(self, resource_group_name: str, sql_migration_service_name: str _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, api_version=api_version, @@ -805,11 +732,12 @@ def begin_delete(self, resource_group_name: str, sql_migration_service_name: str params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -818,26 +746,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return 
LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore def _update_initial( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.SqlMigrationServiceUpdate, IO], + parameters: Union[_models.SqlMigrationServiceUpdate, IO[bytes]], **kwargs: Any - ) -> _models.SqlMigrationService: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -848,21 +772,19 @@ def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "SqlMigrationServiceUpdate") - request = build_update_request( 
+ _request = build_update_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, @@ -870,38 +792,34 @@ def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("SqlMigrationService", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } - @overload def begin_update( self, @@ -924,14 +842,6 @@ def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService] @@ -943,7 +853,7 @@ def begin_update( self, resource_group_name: str, sql_migration_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -956,18 +866,10 @@ def begin_update( :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService] @@ -979,7 +881,7 @@ def begin_update( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.SqlMigrationServiceUpdate, IO], + parameters: Union[_models.SqlMigrationServiceUpdate, IO[bytes]], **kwargs: Any ) -> LROPoller[_models.SqlMigrationService]: """Update Database Migration Service. @@ -989,20 +891,9 @@ def begin_update( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationServiceUpdate or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + :param parameters: Details of SqlMigrationService resource. Is either a + SqlMigrationServiceUpdate type or a IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.SqlMigrationServiceUpdate or IO[bytes] :return: An instance of LROPoller that returns either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datamigration.models.SqlMigrationService] @@ -1011,9 +902,7 @@ def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SqlMigrationService] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -1031,12 +920,13 @@ def begin_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SqlMigrationService", pipeline_response) + deserialized = self._deserialize("SqlMigrationService", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -1046,17 +936,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.SqlMigrationService].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}" - } + return LROPoller[_models.SqlMigrationService]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace def list_by_resource_group( @@ -1067,7 +955,6 @@ def list_by_resource_group( :param resource_group_name: Name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. Required. :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.SqlMigrationService] :raises ~azure.core.exceptions.HttpResponseError: @@ -1075,12 +962,10 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SqlMigrationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1091,16 +976,14 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - 
request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1112,13 +995,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("SqlMigrationListResult", pipeline_response) @@ -1128,10 +1010,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1143,10 +1026,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices" - } - @distributed_trace def list_auth_keys( self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any @@ -1158,12 +1037,11 @@ def list_auth_keys( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. 
:type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: AuthenticationKeys or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.AuthenticationKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1174,25 +1052,22 @@ def list_auth_keys( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.AuthenticationKeys] = kwargs.pop("cls", None) - request = build_list_auth_keys_request( + _request = build_list_auth_keys_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_auth_keys.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1201,16 +1076,12 @@ def list_auth_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AuthenticationKeys", pipeline_response) + deserialized = self._deserialize("AuthenticationKeys", 
pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - list_auth_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listAuthKeys" - } + return deserialized # type: ignore @overload def regenerate_auth_keys( @@ -1234,7 +1105,6 @@ def regenerate_auth_keys( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: RegenAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys :raises ~azure.core.exceptions.HttpResponseError: @@ -1245,7 +1115,7 @@ def regenerate_auth_keys( self, resource_group_name: str, sql_migration_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -1258,11 +1128,10 @@ def regenerate_auth_keys( :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: RegenAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys :raises ~azure.core.exceptions.HttpResponseError: @@ -1273,7 +1142,7 @@ def regenerate_auth_keys( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.RegenAuthKeys, IO], + parameters: Union[_models.RegenAuthKeys, IO[bytes]], **kwargs: Any ) -> _models.RegenAuthKeys: """Regenerate a new set of Authentication Keys for Self Hosted Integration Runtime. @@ -1283,18 +1152,14 @@ def regenerate_auth_keys( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.RegenAuthKeys or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Details of SqlMigrationService resource. Is either a RegenAuthKeys type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.RegenAuthKeys or IO[bytes] :return: RegenAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.RegenAuthKeys :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1305,21 +1170,19 @@ def regenerate_auth_keys( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.RegenAuthKeys] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "RegenAuthKeys") - request = build_regenerate_auth_keys_request( + _request = build_regenerate_auth_keys_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, @@ -1327,15 +1190,14 @@ def regenerate_auth_keys( content_type=content_type, json=_json, content=_content, - template_url=self.regenerate_auth_keys.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, 
**kwargs ) response = pipeline_response.http_response @@ -1344,16 +1206,12 @@ def regenerate_auth_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("RegenAuthKeys", pipeline_response) + deserialized = self._deserialize("RegenAuthKeys", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - regenerate_auth_keys.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/regenerateAuthKeys" - } + return deserialized # type: ignore @overload def delete_node( @@ -1377,7 +1235,6 @@ def delete_node( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DeleteNode or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DeleteNode :raises ~azure.core.exceptions.HttpResponseError: @@ -1388,7 +1245,7 @@ def delete_node( self, resource_group_name: str, sql_migration_service_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -1401,11 +1258,10 @@ def delete_node( :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str :param parameters: Details of SqlMigrationService resource. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: DeleteNode or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DeleteNode :raises ~azure.core.exceptions.HttpResponseError: @@ -1416,7 +1272,7 @@ def delete_node( self, resource_group_name: str, sql_migration_service_name: str, - parameters: Union[_models.DeleteNode, IO], + parameters: Union[_models.DeleteNode, IO[bytes]], **kwargs: Any ) -> _models.DeleteNode: """Delete the integration runtime node. @@ -1426,18 +1282,14 @@ def delete_node( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :param parameters: Details of SqlMigrationService resource. Is either a model type or a IO - type. Required. - :type parameters: ~azure.mgmt.datamigration.models.DeleteNode or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Details of SqlMigrationService resource. Is either a DeleteNode type or a + IO[bytes] type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.DeleteNode or IO[bytes] :return: DeleteNode or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.DeleteNode :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1448,21 +1300,19 @@ def delete_node( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DeleteNode] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "DeleteNode") - request = build_delete_node_request( + _request = build_delete_node_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, @@ -1470,15 +1320,14 @@ def delete_node( content_type=content_type, json=_json, content=_content, - template_url=self.delete_node.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ 
-1487,16 +1336,12 @@ def delete_node( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DeleteNode", pipeline_response) + deserialized = self._deserialize("DeleteNode", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - delete_node.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/deleteNode" - } + return deserialized # type: ignore @distributed_trace def list_migrations( @@ -1509,7 +1354,6 @@ def list_migrations( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DatabaseMigration or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.DatabaseMigration] :raises ~azure.core.exceptions.HttpResponseError: @@ -1517,12 +1361,10 @@ def list_migrations( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatabaseMigrationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1533,17 +1375,15 @@ def list_migrations( def prepare_request(next_link=None): if not 
next_link: - request = build_list_migrations_request( + _request = build_list_migrations_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_migrations.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1555,13 +1395,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("DatabaseMigrationListResult", pipeline_response) @@ -1571,10 +1410,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1586,10 +1426,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_migrations.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listMigrations" - } - @distributed_trace def list_monitoring_data( 
self, resource_group_name: str, sql_migration_service_name: str, **kwargs: Any @@ -1602,12 +1438,11 @@ def list_monitoring_data( :type resource_group_name: str :param sql_migration_service_name: Name of the SQL Migration Service. Required. :type sql_migration_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeMonitoringData or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.IntegrationRuntimeMonitoringData :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1618,25 +1453,22 @@ def list_monitoring_data( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeMonitoringData] = kwargs.pop("cls", None) - request = build_list_monitoring_data_request( + _request = build_list_monitoring_data_request( resource_group_name=resource_group_name, sql_migration_service_name=sql_migration_service_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_monitoring_data.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1645,22 +1477,17 @@ def list_monitoring_data( 
map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - list_monitoring_data.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataMigration/sqlMigrationServices/{sqlMigrationServiceName}/listMonitoringData" - } + return deserialized # type: ignore @distributed_trace def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.SqlMigrationService"]: """Retrieve all SQL migration services in the subscriptions. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either SqlMigrationService or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.SqlMigrationService] :raises ~azure.core.exceptions.HttpResponseError: @@ -1668,12 +1495,10 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.SqlMigrationS _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SqlMigrationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1684,15 +1509,13 @@ def list_by_subscription(self, **kwargs: 
Any) -> Iterable["_models.SqlMigrationS def prepare_request(next_link=None): if not next_link: - request = build_list_by_subscription_request( + _request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_subscription.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -1704,13 +1527,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("SqlMigrationListResult", pipeline_response) @@ -1720,10 +1542,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1734,7 +1557,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_by_subscription.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/sqlMigrationServices" - } diff --git 
a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_tasks_operations.py index 1816624493cb..0b9da588e106 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_tasks_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_tasks_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload import urllib.parse from azure.core.exceptions import ( @@ -20,20 +21,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -53,9 +52,7 @@ def build_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -70,7 +67,7 @@ def build_list_request( "projectName": _SERIALIZER.url("project_name", project_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -89,9 +86,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", 
"application/json") @@ -108,7 +103,7 @@ def build_create_or_update_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -134,9 +129,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -152,7 +145,7 @@ def build_get_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -178,9 +171,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -196,7 +187,7 @@ def build_delete_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") @@ -215,9 +206,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -234,7 +223,7 @@ def build_update_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -253,9 +242,7 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -271,7 +258,7 @@ def build_cancel_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -288,9 +275,7 @@ def build_command_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: 
Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -307,7 +292,7 @@ def build_command_request( "taskName": _SERIALIZER.url("task_name", task_name, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -345,9 +330,10 @@ def list( ) -> Iterable["_models.ProjectTask"]: """Get tasks in a service. - The services resource is the top-level resource that represents the Database Migration Service. - This method returns a list of tasks owned by a service resource. Some tasks may have a status - of Unknown, which indicates that an error occurred while querying the status of that task. + The services resource is the top-level resource that represents the Azure Database Migration + Service (classic). This method returns a list of tasks owned by a service resource. Some tasks + may have a status of Unknown, which indicates that an error occurred while querying the status + of that task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -357,7 +343,6 @@ def list( :type project_name: str :param task_type: Filter tasks by task type. Default value is None. 
:type task_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ProjectTask or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.ProjectTask] :raises ~azure.core.exceptions.HttpResponseError: @@ -365,12 +350,10 @@ def list( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TaskList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -381,19 +364,17 @@ def list( def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( group_name=group_name, service_name=service_name, project_name=project_name, subscription_id=self._config.subscription_id, task_type=task_type, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -405,13 +386,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + 
_request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("TaskList", pipeline_response) @@ -421,10 +401,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -437,10 +418,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks" - } - @overload def create_or_update( self, @@ -456,8 +433,9 @@ def create_or_update( """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PUT method creates a new task or updates an existing one, although since tasks - have no mutable custom properties, there is little reason to update an existing one. + (classic) instance. The PUT method creates a new task or updates an existing one, although + since tasks have no mutable custom properties, there is little reason to update an existing + one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -472,7 +450,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -485,7 +462,7 @@ def create_or_update( service_name: str, project_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -493,8 +470,9 @@ def create_or_update( """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PUT method creates a new task or updates an existing one, although since tasks - have no mutable custom properties, there is little reason to update an existing one. + (classic) instance. The PUT method creates a new task or updates an existing one, although + since tasks have no mutable custom properties, there is little reason to update an existing + one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -505,11 +483,10 @@ def create_or_update( :param task_name: Name of the Task. Required. :type task_name: str :param parameters: Information about the task. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -522,14 +499,15 @@ def create_or_update( service_name: str, project_name: str, task_name: str, - parameters: Union[_models.ProjectTask, IO], + parameters: Union[_models.ProjectTask, IO[bytes]], **kwargs: Any ) -> _models.ProjectTask: """Create or update task. 
The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PUT method creates a new task or updates an existing one, although since tasks - have no mutable custom properties, there is little reason to update an existing one. + (classic) instance. The PUT method creates a new task or updates an existing one, although + since tasks have no mutable custom properties, there is little reason to update an existing + one. :param group_name: Name of the resource group. Required. :type group_name: str @@ -539,17 +517,14 @@ def create_or_update( :type project_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Information about the task. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes] + type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes] :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -560,21 +535,19 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectTask") - request = build_create_or_update_request( + _request = build_create_or_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -584,15 +557,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -602,21 +574,13 @@ def create_or_update( 
error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("ProjectTask", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}" - } - @distributed_trace def get( self, @@ -630,7 +594,7 @@ def get( """Get task information. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The GET method retrieves information about a task. + (classic) instance. The GET method retrieves information about a task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -642,12 +606,11 @@ def get( :type task_name: str :param expand: Expand the response. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -658,12 +621,10 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -671,15 +632,14 @@ def get( subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -689,16 +649,12 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return 
deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -713,7 +669,7 @@ def delete( # pylint: disable=inconsistent-return-statements """Delete task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The DELETE method deletes a task, canceling it first if it's running. + (classic) instance. The DELETE method deletes a task, canceling it first if it's running. :param group_name: Name of the resource group. Required. :type group_name: str @@ -726,12 +682,11 @@ def delete( # pylint: disable=inconsistent-return-statements :param delete_running_tasks: Delete the resource even if it contains running tasks. Default value is None. 
:type delete_running_tasks: bool - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -742,12 +697,10 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -755,15 +708,14 @@ def delete( # pylint: disable=inconsistent-return-statements subscription_id=self._config.subscription_id, delete_running_tasks=delete_running_tasks, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -774,11 +726,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}" - } + return cls(pipeline_response, None, {}) # type: ignore @overload def update( @@ -795,8 +743,8 @@ def update( """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PATCH method updates an existing task, but since tasks have no mutable custom - properties, there is little reason to do so. + (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable + custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -811,7 +759,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -824,7 +771,7 @@ def update( service_name: str, project_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -832,8 +779,8 @@ def update( """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PATCH method updates an existing task, but since tasks have no mutable custom - properties, there is little reason to do so. + (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable + custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -844,11 +791,10 @@ def update( :param task_name: Name of the Task. Required. 
:type task_name: str :param parameters: Information about the task. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: @@ -861,14 +807,14 @@ def update( service_name: str, project_name: str, task_name: str, - parameters: Union[_models.ProjectTask, IO], + parameters: Union[_models.ProjectTask, IO[bytes]], **kwargs: Any ) -> _models.ProjectTask: """Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. The PATCH method updates an existing task, but since tasks have no mutable custom - properties, there is little reason to do so. + (classic) instance. The PATCH method updates an existing task, but since tasks have no mutable + custom properties, there is little reason to do so. :param group_name: Name of the resource group. Required. :type group_name: str @@ -878,17 +824,14 @@ def update( :type project_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Information about the task. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Information about the task. Is either a ProjectTask type or a IO[bytes] + type. Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.ProjectTask or IO[bytes] :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -899,21 +842,19 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "ProjectTask") - request = build_update_request( + _request = build_update_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -923,15 +864,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -941,16 +881,12 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ApiError, 
pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}" - } + return deserialized # type: ignore @distributed_trace def cancel( @@ -959,7 +895,7 @@ def cancel( """Cancel a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. This method cancels a task if it's currently queued or running. + (classic) instance. This method cancels a task if it's currently queued or running. :param group_name: Name of the resource group. Required. :type group_name: str @@ -969,12 +905,11 @@ def cancel( :type project_name: str :param task_name: Name of the Task. Required. 
:type task_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -985,27 +920,24 @@ def cancel( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ProjectTask] = kwargs.pop("cls", None) - request = build_cancel_request( + _request = build_cancel_request( group_name=group_name, service_name=service_name, project_name=project_name, task_name=task_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.cancel.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1015,16 +947,12 @@ def cancel( error = self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("ProjectTask", pipeline_response) + deserialized = self._deserialize("ProjectTask", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return 
deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - cancel.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/cancel" - } + return deserialized # type: ignore @overload def command( @@ -1041,7 +969,7 @@ def command( """Execute a command on a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. This method executes a command on a running task. + (classic) instance. This method executes a command on a running task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -1056,7 +984,6 @@ def command( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: CommandProperties or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.CommandProperties :raises ~azure.core.exceptions.HttpResponseError: @@ -1069,7 +996,7 @@ def command( service_name: str, project_name: str, task_name: str, - parameters: IO, + parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any @@ -1077,7 +1004,7 @@ def command( """Execute a command on a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. This method executes a command on a running task. + (classic) instance. This method executes a command on a running task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -1088,11 +1015,10 @@ def command( :param task_name: Name of the Task. Required. :type task_name: str :param parameters: Command to execute. Required. - :type parameters: IO + :type parameters: IO[bytes] :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: CommandProperties or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.CommandProperties :raises ~azure.core.exceptions.HttpResponseError: @@ -1105,13 +1031,13 @@ def command( service_name: str, project_name: str, task_name: str, - parameters: Union[_models.CommandProperties, IO], + parameters: Union[_models.CommandProperties, IO[bytes]], **kwargs: Any ) -> _models.CommandProperties: """Execute a command on a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS - instance. This method executes a command on a running task. + (classic) instance. This method executes a command on a running task. :param group_name: Name of the resource group. Required. :type group_name: str @@ -1121,17 +1047,14 @@ def command( :type project_name: str :param task_name: Name of the Task. Required. :type task_name: str - :param parameters: Command to execute. Is either a model type or a IO type. Required. - :type parameters: ~azure.mgmt.datamigration.models.CommandProperties or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + :param parameters: Command to execute. Is either a CommandProperties type or a IO[bytes] type. + Required. 
+ :type parameters: ~azure.mgmt.datamigration.models.CommandProperties or IO[bytes] :return: CommandProperties or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.CommandProperties :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1142,21 +1065,19 @@ def command( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.CommandProperties] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: _json = self._serialize.body(parameters, "CommandProperties") - request = build_command_request( + _request = build_command_request( group_name=group_name, service_name=service_name, project_name=project_name, @@ -1166,15 +1087,14 @@ def command( content_type=content_type, json=_json, content=_content, - template_url=self.command.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1184,13 +1104,9 @@ def command( error = 
self._deserialize.failsafe_deserialize(_models.ApiError, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("CommandProperties", pipeline_response) + deserialized = self._deserialize("CommandProperties", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - command.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/command" - } + return deserialized # type: ignore diff --git a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_usages_operations.py b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_usages_operations.py index 7fc08418ab7c..ae1038a39ffb 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_usages_operations.py +++ b/sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/operations/_usages_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar import urllib.parse from azure.core.exceptions import ( @@ -20,20 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,9 +43,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", "2022-03-30-preview") - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-07-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -59,7 +55,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht "location": 
_SERIALIZER.url("location", location, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -93,12 +89,11 @@ def __init__(self, *args, **kwargs): def list(self, location: str, **kwargs: Any) -> Iterable["_models.Quota"]: """Get resource quotas and usage information. - This method returns region-specific quotas and resource usage information for the Database - Migration Service. + This method returns region-specific quotas and resource usage information for the Azure + Database Migration Service (classic). :param location: The Azure region of the operation. Required. :type location: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Quota or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datamigration.models.Quota] :raises ~azure.core.exceptions.HttpResponseError: @@ -106,12 +101,10 @@ def list(self, location: str, **kwargs: Any) -> Iterable["_models.Quota"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2022-03-30-preview"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.QuotaList] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -122,16 +115,14 @@ def list(self, location: str, **kwargs: Any) -> Iterable["_models.Quota"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( location=location, 
subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: # make call to next link with the client's api-version @@ -143,13 +134,12 @@ def prepare_request(next_link=None): } ) _next_request_params["api-version"] = self._config.api_version - request = HttpRequest( + _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("QuotaList", pipeline_response) @@ -159,10 +149,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -174,7 +165,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/usages" - } diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_max.py new file mode 100644 index 000000000000..050361ef73b4 --- /dev/null +++ 
b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_max.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python cosmos_db_mongo_create_or_update_database_migration_max.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_create( + resource_group_name="testrg", + target_resource_name="targetCosmosDbClusterName", + migration_name="migrationRequest", + parameters={ + "properties": { + "collectionList": [ + { + "sourceCollection": "sourceCol1", + "sourceDatabase": "sourceDb1", + "targetCollection": "targetCol1", + "targetDatabase": "targetDb1", + }, + { + "sourceCollection": "sourceCol2", + "sourceDatabase": "sourceDb2", + "targetCollection": "sourceCol2", + "targetDatabase": "sourceDb2", + }, + ], + "kind": "MongoToCosmosDbMongo", + "migrationService": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DataMigration/MigrationServices/testMigrationService", + "scope": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DocumentDB/mongoClusters/targetCosmosDbClusterName", + "sourceMongoConnection": { + "host": "abc.mongodb.com", + "password": "placeholder", + "port": 88, + "useSsl": True, + "userName": "abc", + }, + "targetMongoConnection": { + "host": "xyz.mongocluster.cosmos.azure.com", + "password": "placeholder", + "port": 10255, + "useSsl": True, + "userName": "def", + }, + } + }, + ).result() + print(response) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CosmosDbMongoCreateOrUpdateDatabaseMigrationMAX.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_min.py 
b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_min.py new file mode 100644 index 000000000000..b522be8c7180 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_create_or_update_database_migration_min.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python cosmos_db_mongo_create_or_update_database_migration_min.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_create( + resource_group_name="testrg", + target_resource_name="targetCosmosDbClusterName", + migration_name="migrationRequest", + parameters={ + "properties": { + "collectionList": [ + { + "sourceCollection": "sourceCol1", + "sourceDatabase": "sourceDb1", + "targetCollection": "targetCol1", + "targetDatabase": "targetDb1", + }, + {"sourceCollection": "sourceCol2", "sourceDatabase": "sourceDb2"}, + ], + "kind": "MongoToCosmosDbMongo", + "migrationService": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DataMigration/MigrationServices/testMigrationService", + "scope": "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.DocumentDB/mongoClusters/targetCosmosDbClusterName", + "sourceMongoConnection": { + "host": "abc.mongodb.com", + "password": "placeholder", + "port": 88, + "useSsl": True, + "userName": "abc", + }, + "targetMongoConnection": {"connectionString": "placeholder"}, + } + }, + ).result() + print(response) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CosmosDbMongoCreateOrUpdateDatabaseMigrationMIN.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_delete_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_delete_database_migration.py new file mode 100644 index 000000000000..f450f24c2220 --- /dev/null +++ 
b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_delete_database_migration.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python cosmos_db_mongo_delete_database_migration.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_delete( + resource_group_name="testrg", + target_resource_name="targetCosmosDbClusterName", + migration_name="migrationRequest", + ).result() + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CosmosDbMongoDeleteDatabaseMigration.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration.py new file mode 100644 index 000000000000..6104ae90d458 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python cosmos_db_mongo_get_database_migration.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get( + resource_group_name="testrg", + target_resource_name="targetCosmosDbClusterName", + migration_name="migrationRequest", + ) + print(response) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CosmosDbMongoGetDatabaseMigration.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration_expanded.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration_expanded.py new file mode 100644 index 000000000000..47ce591f249a --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_get_database_migration_expanded.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python cosmos_db_mongo_get_database_migration_expanded.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get( + resource_group_name="testrg", + target_resource_name="targetCosmosDbClusterName", + migration_name="migrationRequest", + ) + print(response) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CosmosDbMongoGetDatabaseMigrationExpanded.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_list_by_scope_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_list_by_scope_database_migration.py new file mode 100644 index 000000000000..499d217692f6 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/cosmos_db_mongo_list_by_scope_database_migration.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python cosmos_db_mongo_list_by_scope_database_migration.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get_for_scope( + resource_group_name="testrg", + target_resource_name="targetCosmosDbClusterName", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CosmosDbMongoListByScopeDatabaseMigration.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_max.py index bcd243168839..98b2d06935f8 100644 --- 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_max.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_max.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,14 +30,14 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.sql_migration_services.begin_create_or_update( + response = client.migration_services.begin_create_or_update( resource_group_name="testrg", - sql_migration_service_name="testagent", + migration_service_name="testagent", parameters={"location": "northeurope"}, ).result() print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/CreateOrUpdateMigrationServiceMAX.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CreateOrUpdateMigrationServiceMAX.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_min.py index 47c4bec2b286..dac18fad4525 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_min.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_migration_service_min.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,14 +30,14 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = 
client.sql_migration_services.begin_create_or_update( + response = client.migration_services.begin_create_or_update( resource_group_name="testrg", - sql_migration_service_name="testagent", + migration_service_name="testagent", parameters={"location": "northeurope"}, ).result() print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/CreateOrUpdateMigrationServiceMIN.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CreateOrUpdateMigrationServiceMIN.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_max.py new file mode 100644 index 000000000000..bf213d46231e --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_max.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python create_or_update_sql_migration_service_max.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.sql_migration_services.begin_create_or_update( + resource_group_name="testrg", + sql_migration_service_name="testagent", + parameters={"location": "northeurope"}, + ).result() + print(response) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CreateOrUpdateSqlMigrationServiceMAX.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_min.py new file mode 100644 index 000000000000..844612421308 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/create_or_update_sql_migration_service_min.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python create_or_update_sql_migration_service_min.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.sql_migration_services.begin_create_or_update( + resource_group_name="testrg", + sql_migration_service_name="testagent", + parameters={"location": "northeurope"}, + ).result() + print(response) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/CreateOrUpdateSqlMigrationServiceMIN.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_integration_runtime_node.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_integration_runtime_node.py index f28ecf4b8645..ff9b7ffaf862 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_integration_runtime_node.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_integration_runtime_node.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from 
azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/DeleteIntegrationRuntimeNode.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/DeleteIntegrationRuntimeNode.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_migration_service.py index 40d496e06f36..aa542dcb8920 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,13 +30,12 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.sql_migration_services.begin_delete( + client.migration_services.begin_delete( resource_group_name="testrg", - sql_migration_service_name="service1", + migration_service_name="service1", ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/DeleteMigrationService.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/DeleteMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_sql_migration_service.py 
b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_sql_migration_service.py new file mode 100644 index 000000000000..e2c7ef212a7c --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/delete_sql_migration_service.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python delete_sql_migration_service.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + client.sql_migration_services.begin_delete( + resource_group_name="testrg", + sql_migration_service_name="service1", + ).result() + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/DeleteSqlMigrationService.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_create_or_update.py index ff8e4e8f45e9..9ff12a4403ea 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_create_or_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_create_or_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -39,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_CreateOrUpdate.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Files_CreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_delete.py index 5c09a638dd25..2b8b58ad23c5 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_delete.py +++ 
b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,15 +30,14 @@ def main(): subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f", ) - response = client.files.delete( + client.files.delete( group_name="DmsSdkRg", service_name="DmsSdkService", project_name="DmsSdkProject", file_name="x114d023d8", ) - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_Delete.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Files_Delete.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_get.py index c3bc13aad6f2..69645acb45fd 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_get.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_Get.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Files_Get.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_list.py 
b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_list.py index 7fda5829c8be..cecfdf9b9fbd 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_list.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_List.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Files_List.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read.py index ad44996b9b8d..bd57f78cfce2 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_Read.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Files_Read.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read_write.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read_write.py index 
0882266264f0..1d131f943ca7 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read_write.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_read_write.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_ReadWrite.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Files_ReadWrite.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_update.py index 0133391e1b05..2ab08b6a1391 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/files_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -39,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Files_Update.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Files_Update.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_migration_service.py index 07f4eee706f2..0db46d5c6d94 100644 --- 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,13 +30,13 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.sql_migration_services.get( + response = client.migration_services.get( resource_group_name="testrg", - sql_migration_service_name="service1", + migration_service_name="service1", ) print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/GetMigrationService.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/GetMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_sql_migration_service.py similarity index 90% rename from sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_migration_service.py rename to sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_sql_migration_service.py index fbb3a8baa4d5..7e65744bb091 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_monitor_data_sql_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -14,7 +15,7 @@ pip 
install azure-identity pip install azure-mgmt-datamigration # USAGE - python get_monitor_data_migration_service.py + python get_monitor_data_sql_migration_service.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/GetMonitorDataMigrationService.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/GetMonitorDataSqlMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_sql_migration_service.py new file mode 100644 index 000000000000..60c6936cfb7d --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/get_sql_migration_service.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python get_sql_migration_service.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.sql_migration_services.get( + resource_group_name="testrg", + sql_migration_service_name="service1", + ) + print(response) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/GetSqlMigrationService.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_sql_migration_service.py similarity index 90% rename from sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_migration_service.py rename to sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_sql_migration_service.py index 910ed8e27a59..ab321a693320 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_auth_keys_sql_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- 
from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -14,7 +15,7 @@ pip install azure-identity pip install azure-mgmt-datamigration # USAGE - python list_auth_keys_migration_service.py + python list_auth_keys_sql_migration_service.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListAuthKeysMigrationService.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ListAuthKeysSqlMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_migration_service.py index 65b788d1261c..0229e8939c43 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,13 +30,13 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.sql_migration_services.list_by_resource_group( + response = client.migration_services.list_by_resource_group( resource_group_name="testrg", ) for item in response: print(item) -# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListByResourceGroupMigrationService.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ListByResourceGroupMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_sql_migration_service.py new file mode 100644 index 000000000000..ff3ba7250559 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_resource_group_sql_migration_service.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python list_by_resource_group_sql_migration_service.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.sql_migration_services.list_by_resource_group( + resource_group_name="testrg", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ListByResourceGroupSqlMigrationService.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_migration_service.py index 767b6790181f..86fc323e2af8 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,11 +30,11 @@ def main(): subscription_id="subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.Sql/managedInstances/managedInstance1", ) - response = client.sql_migration_services.list_by_subscription() + response = client.migration_services.list_by_subscription() for item in response: print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListBySubscriptionMigrationService.json +# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ListBySubscriptionMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_sql_migration_service.py new file mode 100644 index 000000000000..9742435917a7 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_by_subscription_sql_migration_service.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python list_by_subscription_sql_migration_service.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/testrg/providers/Microsoft.Sql/managedInstances/managedInstance1", + ) + + response = client.sql_migration_services.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ListBySubscriptionSqlMigrationService.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_migration_service.py index d52fd7beb861..7cbf62c989ec 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,14 +30,14 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.sql_migration_services.list_migrations( + response = client.migration_services.list_migrations( resource_group_name="testrg", - sql_migration_service_name="service1", + migration_service_name="testMigrationService", ) for item in response: print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListMigrationsByMigrationService.json +# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ListMigrationsByMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_sql_migration_service.py new file mode 100644 index 000000000000..6c5e61bf42c6 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_migrations_by_sql_migration_service.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python list_migrations_by_sql_migration_service.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.sql_migration_services.list_migrations( + resource_group_name="testrg", + sql_migration_service_name="service1", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ListMigrationsBySqlMigrationService.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_operation.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_operation.py index 27d921da1820..7e8242f5e7f9 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_operation.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/list_operation.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -34,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ListOperation.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ListOperation.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_create_or_update.py index 30ecd5ad1fd4..ae6bc76120ec 100644 --- 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_create_or_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_create_or_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_CreateOrUpdate.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Projects_CreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_delete.py index 2c4405dd482d..14f0ec410ed1 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_delete.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,14 +30,13 @@ def main(): subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f", ) - response = client.projects.delete( + client.projects.delete( group_name="DmsSdkRg", service_name="DmsSdkService", project_name="DmsSdkProject", ) - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_Delete.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Projects_Delete.json if __name__ == "__main__": main() diff --git 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_get.py index b91acdb7aef3..2f074505ec30 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_get.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_Get.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Projects_Get.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_list.py index 92b2465fd146..be957c8b5575 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_list.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_List.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Projects_List.json if __name__ == "__main__": main() diff --git 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_update.py index f4a9af3cab5c..0ffe0f28a5d3 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/projects_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Projects_Update.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Projects_Update.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_sql_migration_service.py similarity index 90% rename from sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_migration_service.py rename to sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_sql_migration_service.py index cdbacf854d0d..568c777646ed 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/regen_auth_keys_sql_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -14,7 +15,7 @@ pip install azure-identity pip install azure-mgmt-datamigration # USAGE - python 
regen_auth_keys_migration_service.py + python regen_auth_keys_sql_migration_service.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/RegenAuthKeysMigrationService.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/RegenAuthKeysSqlMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/resource_skus_list_skus.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/resource_skus_list_skus.py index d840dbb3eb0e..7e684c6f8c26 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/resource_skus_list_skus.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/resource_skus_list_skus.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -34,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ResourceSkus_ListSkus.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ResourceSkus_ListSkus.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_cancel.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_cancel.py index c7d9e12933f3..69934819bbe5 100644 --- 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_cancel.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_cancel.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_Cancel.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ServiceTasks_Cancel.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_create_or_update.py index e743a0755578..fd224ee9978f 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_create_or_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_create_or_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -33,11 +34,18 @@ def main(): group_name="DmsSdkRg", service_name="DmsSdkService", task_name="DmsSdkTask", - parameters={"properties": {"input": {"serverVersion": "NA"}, "taskType": "Service.Check.OCI"}}, + parameters={ + "properties": { + "input": { + "sourceConnectionInfo": {"port": 3306, "serverName": "localhost", "type": "MySqlConnectionInfo"} + }, + "taskType": "ConnectToSource.MySql", + } + }, ) print(response) -# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_CreateOrUpdate.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ServiceTasks_CreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_delete.py index 508f39869e75..c34c6f6378f2 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_delete.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,14 +30,13 @@ def main(): subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f", ) - response = client.service_tasks.delete( + client.service_tasks.delete( group_name="DmsSdkRg", service_name="DmsSdkService", task_name="DmsSdkTask", ) - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_Delete.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ServiceTasks_Delete.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_get.py index 4c405770957c..2339835b7e99 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_get.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_get.py @@ -7,6 +7,7 @@ # 
-------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_Get.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ServiceTasks_Get.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_list.py index 938aab05cac7..2b28861061ce 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_list.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_List.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ServiceTasks_List.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_update.py index 7c22110decf8..4b0d3a0ebc4a 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/service_tasks_update.py @@ -7,6 +7,7 @@ # 
-------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -33,11 +34,18 @@ def main(): group_name="DmsSdkRg", service_name="DmsSdkService", task_name="DmsSdkTask", - parameters={"properties": {"input": {"serverVersion": "NA"}, "taskType": "Service.Check.OCI"}}, + parameters={ + "properties": { + "input": { + "sourceConnectionInfo": {"port": 3306, "serverName": "localhost", "type": "MySqlConnectionInfo"} + }, + "taskType": "ConnectToSource.MySql", + } + }, ) print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/ServiceTasks_Update.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/ServiceTasks_Update.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_children_name_availability.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_children_name_availability.py index fb2fe8a39a50..b633fcc4a5b4 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_children_name_availability.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_children_name_availability.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_CheckChildrenNameAvailability.json +# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_CheckChildrenNameAvailability.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_name_availability.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_name_availability.py index d8b50b5c39c4..604670470b65 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_name_availability.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_name_availability.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_CheckNameAvailability.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_CheckNameAvailability.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_status.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_status.py index 76d907a83761..c13066777384 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_status.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_check_status.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_CheckStatus.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_CheckStatus.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_create_or_update.py index 6789f1c0d2f5..29df42b9d379 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_create_or_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_create_or_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -43,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_CreateOrUpdate.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_CreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_delete.py index 889012e3503c..405fc94b553e 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_delete.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,13 +30,12 @@ def main(): 
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f", ) - response = client.services.begin_delete( + client.services.begin_delete( group_name="DmsSdkRg", service_name="DmsSdkService", ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Delete.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_Delete.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_get.py index 690b44e4ddc1..d75924cde052 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_get.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Get.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_Get.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list.py index 7c445fa2f2ab..b2c59a38ccc6 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from 
azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -34,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_List.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_List.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_by_resource_group.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_by_resource_group.py index 83b6c26cf554..04fd74a3ecc2 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_by_resource_group.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_by_resource_group.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -36,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_ListByResourceGroup.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_ListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_skus.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_skus.py index 023a71b6e58d..ba233d8baee1 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_skus.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_list_skus.py @@ -7,6 +7,7 @@ # 
-------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_ListSkus.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_ListSkus.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_start.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_start.py index e9964e6ebd6f..d69123228e27 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_start.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_start.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,13 +30,12 @@ def main(): subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f", ) - response = client.services.begin_start( + client.services.begin_start( group_name="DmsSdkRg", service_name="DmsSdkService", ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Start.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_Start.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_stop.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_stop.py index 53872ced25d1..cb2364ee18d9 100644 --- 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_stop.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_stop.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,13 +30,12 @@ def main(): subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f", ) - response = client.services.begin_stop( + client.services.begin_stop( group_name="DmsSdkRg", service_name="DmsSdkService", ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Stop.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_Stop.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_update.py index a584a3151afb..f2df77ef63b3 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/services_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -42,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Services_Update.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Services_Update.json if __name__ == "__main__": main() diff --git 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_cancel_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_cancel_database_migration.py index 0d5c3dd7a042..c5e0618ec808 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_cancel_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_cancel_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,15 +30,14 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.database_migrations_sql_db.begin_cancel( + client.database_migrations_sql_db.begin_cancel( resource_group_name="testrg", sql_db_instance_name="sqldbinstance", target_db_name="db1", parameters={"migrationOperationId": "9a90bb84-e70f-46f7-b0ae-1aef5b3b9f07"}, ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbCancelDatabaseMigration.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlDbCancelDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_max.py index 79fac342ea71..aaca38135473 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_max.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_max.py @@ -7,6 +7,7 @@ # 
-------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -62,6 +63,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbCreateOrUpdateDatabaseMigrationMAX.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlDbCreateOrUpdateDatabaseMigrationMAX.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_min.py index ba4edfd758fc..9a81e8b23174 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_min.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_create_or_update_database_migration_min.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -61,6 +62,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbCreateOrUpdateDatabaseMigrationMIN.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlDbCreateOrUpdateDatabaseMigrationMIN.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_delete_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_delete_database_migration.py index 
eb138caeef78..8ad7dfa8e5d7 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_delete_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_delete_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,14 +30,13 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.database_migrations_sql_db.begin_delete( + client.database_migrations_sql_db.begin_delete( resource_group_name="testrg", sql_db_instance_name="sqldbinstance", target_db_name="db1", ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbDeleteDatabaseMigration.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlDbDeleteDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration.py index a9eb1654be66..c8515c34e6e8 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbGetDatabaseMigration.json 
+# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlDbGetDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration_expanded.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration_expanded.py index c956643d291e..7e9cf487f1d9 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration_expanded.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_db_get_database_migration_expanded.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlDbGetDatabaseMigrationExpanded.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlDbGetDatabaseMigrationExpanded.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cancel_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cancel_database_migration.py index b38612880408..b35217a3b59d 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cancel_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cancel_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,15 +30,14 @@ def 
main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.database_migrations_sql_mi.begin_cancel( + client.database_migrations_sql_mi.begin_cancel( resource_group_name="testrg", managed_instance_name="managedInstance1", target_db_name="db1", parameters={"migrationOperationId": "4124fe90-d1b6-4b50-b4d9-46d02381f59a"}, ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiCancelDatabaseMigration.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlMiCancelDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_max.py index 1202df122e28..4513952feafb 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_max.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_max.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -63,6 +64,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationMAX.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationMAX.json if __name__ == "__main__": main() diff --git 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_min.py index 2122e311aa97..d3f02b9847f9 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_min.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_create_or_update_database_migration_min.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -62,6 +63,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationMIN.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlMiCreateOrUpdateDatabaseMigrationMIN.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cutover_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cutover_database_migration.py index 1d62366a62ac..7933483148a7 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cutover_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_cutover_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,15 +30,14 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.database_migrations_sql_mi.begin_cutover( + 
client.database_migrations_sql_mi.begin_cutover( resource_group_name="testrg", managed_instance_name="managedInstance1", target_db_name="db1", parameters={"migrationOperationId": "4124fe90-d1b6-4b50-b4d9-46d02381f59a"}, ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiCutoverDatabaseMigration.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlMiCutoverDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration.py index 3d0943aede67..0fc944ee5910 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiGetDatabaseMigration.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlMiGetDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration_expanded.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration_expanded.py index d8d6bfc6684e..b019bd0d227a 100644 --- 
a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration_expanded.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_mi_get_database_migration_expanded.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlMiGetDatabaseMigrationExpanded.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlMiGetDatabaseMigrationExpanded.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cancel_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cancel_database_migration.py index 1b40510df83d..539fd9c725f2 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cancel_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cancel_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,15 +30,14 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.database_migrations_sql_vm.begin_cancel( + client.database_migrations_sql_vm.begin_cancel( resource_group_name="testrg", sql_virtual_machine_name="testvm", target_db_name="db1", parameters={"migrationOperationId": "4124fe90-d1b6-4b50-b4d9-46d02381f59a"}, ).result() - print(response) -# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmCancelDatabaseMigration.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlVmCancelDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_max.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_max.py index abe4d72474d3..7a0c7383e4c7 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_max.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_max.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -63,6 +64,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmCreateOrUpdateDatabaseMigrationMAX.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlVmCreateOrUpdateDatabaseMigrationMAX.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_min.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_min.py index 6a86febb00f7..8a0114cd785f 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_min.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_create_or_update_database_migration_min.py 
@@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -62,6 +63,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmCreateOrUpdateDatabaseMigrationMIN.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlVmCreateOrUpdateDatabaseMigrationMIN.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cutover_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cutover_database_migration.py index c2b759e0f9e2..f6544afcf842 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cutover_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_cutover_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,15 +30,14 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.database_migrations_sql_vm.begin_cutover( + client.database_migrations_sql_vm.begin_cutover( resource_group_name="testrg", sql_virtual_machine_name="testvm", target_db_name="db1", parameters={"migrationOperationId": "4124fe90-d1b6-4b50-b4d9-46d02381f59a"}, ).result() - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmCutoverDatabaseMigration.json +# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlVmCutoverDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration.py index a0167d9a4d8b..541c64748c0a 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmGetDatabaseMigration.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlVmGetDatabaseMigration.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration_expanded.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration_expanded.py index 900791f99a5f..6ec6eef58d29 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration_expanded.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/sql_vm_get_database_migration_expanded.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/SqlVmGetDatabaseMigrationExpanded.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/SqlVmGetDatabaseMigrationExpanded.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_cancel.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_cancel.py index dec7cd587696..e116d0345a34 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_cancel.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_cancel.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Cancel.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Tasks_Cancel.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_command.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_command.py index b6a9b841eddf..6c8b7d497c39 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_command.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_command.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -39,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Command.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Tasks_Command.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_create_or_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_create_or_update.py index 1c22aa9d33ff..12d7f977766b 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_create_or_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_create_or_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -54,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_CreateOrUpdate.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Tasks_CreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_delete.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_delete.py index 08e6cff502d8..7d7d4b146796 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_delete.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,15 +30,14 @@ def main(): 
subscription_id="fc04246f-04c5-437e-ac5e-206a19e7193f", ) - response = client.tasks.delete( + client.tasks.delete( group_name="DmsSdkRg", service_name="DmsSdkService", project_name="DmsSdkProject", task_name="DmsSdkTask", ) - print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Delete.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Tasks_Delete.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_get.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_get.py index 42fffa423dba..6b7efc3423c5 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_get.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Get.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Tasks_Get.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_list.py index 64bfed16aff8..c9aede57626a 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_list.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import 
DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -38,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_List.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Tasks_List.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_update.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_update.py index 28d46d756d2b..99a0b7cc9495 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_update.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/tasks_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -54,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Tasks_Update.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Tasks_Update.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_migration_service.py index a016018f2e5d..59ae672f1a11 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_migration_service.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_migration_service.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from 
azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -29,14 +30,14 @@ def main(): subscription_id="00000000-1111-2222-3333-444444444444", ) - response = client.sql_migration_services.begin_update( + response = client.migration_services.begin_update( resource_group_name="testrg", - sql_migration_service_name="testagent", + migration_service_name="testagent", parameters={"tags": {"mytag": "myval"}}, ).result() print(response) -# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/UpdateMigrationService.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/UpdateMigrationService.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_sql_migration_service.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_sql_migration_service.py new file mode 100644 index 000000000000..aa9f3ccad22e --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/update_sql_migration_service.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.datamigration import DataMigrationManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datamigration +# USAGE + python update_sql_migration_service.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataMigrationManagementClient( + credential=DefaultAzureCredential(), + subscription_id="00000000-1111-2222-3333-444444444444", + ) + + response = client.sql_migration_services.begin_update( + resource_group_name="testrg", + sql_migration_service_name="testagent", + parameters={"tags": {"mytag": "myval"}}, + ).result() + print(response) + + +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/UpdateSqlMigrationService.json +if __name__ == "__main__": + main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/usages_list.py b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/usages_list.py index 88a866a9e459..05ecabeb501a 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/generated_samples/usages_list.py +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_samples/usages_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.datamigration import DataMigrationManagementClient """ @@ -36,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: 
specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2022-03-30-preview/examples/Usages_List.json +# x-ms-original-file: specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2023-07-15-preview/examples/Usages_List.json if __name__ == "__main__": main() diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/conftest.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/conftest.py new file mode 100644 index 000000000000..4d80095ff6b9 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/conftest.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# avoid recording sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + datamigrationmanagement_subscription_id = os.environ.get( + "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000" + ) + datamigrationmanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + datamigrationmanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + datamigrationmanagement_client_secret = os.environ.get( + "AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer( + regex=datamigrationmanagement_subscription_id, value="00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer(regex=datamigrationmanagement_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=datamigrationmanagement_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer( + regex=datamigrationmanagement_client_secret, value="00000000-0000-0000-0000-000000000000" + ) + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py new file mode 100644 index 000000000000..382aad428fdb --- 
/dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations.py @@ -0,0 +1,123 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsMongoToCosmosDbRUMongoOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.get( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.begin_create( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + parameters={ + "collectionList": [ + { + "migrationProgressDetails": { + "durationInSeconds": 0, + "migrationError": "str", + "migrationStatus": "str", + "processedDocumentCount": 0, + "sourceDocumentCount": 0, + }, + "sourceCollection": "str", + "sourceDatabase": "str", + "targetCollection": "str", + "targetDatabase": "str", + } + ], + "endedOn": "2020-02-20 00:00:00", + "id": "str", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "name": "str", + "provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceMongoConnection": { + "connectionString": "str", + "host": "str", + "password": "str", + "port": 0, + "useSsl": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "targetMongoConnection": { + "connectionString": "str", + "host": "str", + "password": "str", + "port": 0, + "useSsl": bool, + "userName": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.begin_delete( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_for_scope(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.get_for_scope( + resource_group_name=resource_group.name, + target_resource_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations_async.py new file mode 100644 index 000000000000..f5c2ddd89ce8 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_db_ru_mongo_operations_async.py @@ -0,0 +1,128 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsMongoToCosmosDbRUMongoOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.get( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create(self, resource_group): + response = await ( + await self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.begin_create( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + parameters={ + "collectionList": [ + { + "migrationProgressDetails": { + "durationInSeconds": 0, + "migrationError": "str", + "migrationStatus": "str", + "processedDocumentCount": 0, + "sourceDocumentCount": 0, + }, + "sourceCollection": "str", + "sourceDatabase": "str", + "targetCollection": "str", + "targetDatabase": "str", + } + ], + "endedOn": "2020-02-20 00:00:00", + "id": "str", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "name": "str", + "provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceMongoConnection": { + "connectionString": "str", + "host": "str", + "password": "str", + "port": 0, + "useSsl": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "targetMongoConnection": { + "connectionString": "str", + "host": "str", + "password": "str", + "port": 0, + "useSsl": bool, + "userName": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.begin_delete( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_for_scope(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_db_ru_mongo.get_for_scope( + resource_group_name=resource_group.name, + target_resource_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py new file mode 100644 index 000000000000..1e3f7677b1da --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations.py @@ -0,0 +1,123 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsMongoToCosmosDbvCoreMongoOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_create( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + parameters={ + "collectionList": [ + { + "migrationProgressDetails": { + "durationInSeconds": 0, + "migrationError": "str", + "migrationStatus": "str", + "processedDocumentCount": 0, + "sourceDocumentCount": 0, + }, + "sourceCollection": "str", + "sourceDatabase": "str", + "targetCollection": "str", + "targetDatabase": "str", + } + ], + "endedOn": "2020-02-20 00:00:00", + "id": "str", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "name": "str", + "provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceMongoConnection": { + "connectionString": "str", + "host": "str", + "password": "str", + "port": 0, + "useSsl": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "targetMongoConnection": { + "connectionString": "str", + "host": "str", + "password": "str", + "port": 0, + "useSsl": bool, + "userName": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_delete( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_for_scope(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get_for_scope( + resource_group_name=resource_group.name, + target_resource_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations_async.py new file mode 100644 index 000000000000..5d8c644233b5 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_mongo_to_cosmos_dbv_core_mongo_operations_async.py @@ -0,0 +1,128 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsMongoToCosmosDbvCoreMongoOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create(self, resource_group): + response = await ( + await self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_create( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + parameters={ + "collectionList": [ + { + "migrationProgressDetails": { + "durationInSeconds": 0, + "migrationError": "str", + "migrationStatus": "str", + "processedDocumentCount": 0, + "sourceDocumentCount": 0, + }, + "sourceCollection": "str", + "sourceDatabase": "str", + "targetCollection": "str", + "targetDatabase": "str", + } + ], + "endedOn": "2020-02-20 00:00:00", + "id": "str", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "name": "str", + "provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceMongoConnection": { + "connectionString": "str", + "host": "str", + "password": "str", + "port": 0, + "useSsl": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "targetMongoConnection": { + "connectionString": "str", + "host": "str", + "password": "str", + "port": 0, + "useSsl": bool, + "userName": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.begin_delete( + resource_group_name=resource_group.name, + target_resource_name="str", + migration_name="str", + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_for_scope(self, resource_group): + response = self.client.database_migrations_mongo_to_cosmos_dbv_core_mongo.get_for_scope( + resource_group_name=resource_group.name, + target_resource_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations.py new file mode 100644 index 000000000000..6f0c2c28f006 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations.py @@ -0,0 +1,137 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsSqlDbOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.database_migrations_sql_db.get( + resource_group_name=resource_group.name, + sql_db_instance_name="str", + target_db_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.database_migrations_sql_db.begin_create_or_update( + resource_group_name=resource_group.name, + sql_db_instance_name="str", + target_db_name="str", + parameters={ + "id": "str", + "name": "str", + "properties": { + "kind": "SqlDb", + "endedOn": "2020-02-20 00:00:00", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "migrationStatusDetails": { + "listOfCopyProgressDetails": [ + { + "copyDuration": 0, + "copyStart": "2020-02-20 00:00:00", + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "parallelCopyType": "str", + "rowsCopied": 0, + "rowsRead": 0, + "status": "str", + "tableName": "str", + "usedParallelCopies": 0, + } + ], + "migrationState": "str", + "sqlDataCopyErrors": ["str"], + }, + "offlineConfiguration": {"offline": bool}, + 
"provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceDatabaseName": "str", + "sourceServerName": "str", + "sourceSqlConnection": { + "authentication": "str", + "dataSource": "str", + "encryptConnection": bool, + "password": "str", + "trustServerCertificate": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "tableList": ["str"], + "targetDatabaseCollation": "str", + "targetSqlConnection": { + "authentication": "str", + "dataSource": "str", + "encryptConnection": bool, + "password": "str", + "trustServerCertificate": bool, + "userName": "str", + }, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.database_migrations_sql_db.begin_delete( + resource_group_name=resource_group.name, + sql_db_instance_name="str", + target_db_name="str", + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_cancel(self, resource_group): + response = self.client.database_migrations_sql_db.begin_cancel( + resource_group_name=resource_group.name, + sql_db_instance_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations_async.py new file mode 100644 index 000000000000..897d0a685086 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_db_operations_async.py @@ -0,0 +1,144 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsSqlDbOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.database_migrations_sql_db.get( + resource_group_name=resource_group.name, + sql_db_instance_name="str", + target_db_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.database_migrations_sql_db.begin_create_or_update( + resource_group_name=resource_group.name, + sql_db_instance_name="str", + target_db_name="str", + parameters={ + "id": "str", + "name": "str", + "properties": { + "kind": "SqlDb", + "endedOn": "2020-02-20 00:00:00", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "migrationStatusDetails": { + "listOfCopyProgressDetails": [ + { + "copyDuration": 0, + "copyStart": "2020-02-20 00:00:00", + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "parallelCopyType": "str", + "rowsCopied": 0, + "rowsRead": 0, + "status": "str", + "tableName": "str", + "usedParallelCopies": 0, + } + ], + "migrationState": "str", + "sqlDataCopyErrors": ["str"], + }, + "offlineConfiguration": {"offline": bool}, + "provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceDatabaseName": "str", + "sourceServerName": "str", + "sourceSqlConnection": { + "authentication": "str", + "dataSource": "str", + "encryptConnection": bool, + "password": "str", + "trustServerCertificate": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "tableList": ["str"], + "targetDatabaseCollation": "str", + "targetSqlConnection": { + "authentication": "str", + "dataSource": "str", + "encryptConnection": bool, + "password": "str", + "trustServerCertificate": bool, + "userName": "str", + }, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return 
final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.database_migrations_sql_db.begin_delete( + resource_group_name=resource_group.name, + sql_db_instance_name="str", + target_db_name="str", + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_cancel(self, resource_group): + response = await ( + await self.client.database_migrations_sql_db.begin_cancel( + resource_group_name=resource_group.name, + sql_db_instance_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations.py new file mode 100644 index 000000000000..902166e69274 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations.py @@ -0,0 +1,208 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsSqlMiOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.database_migrations_sql_mi.get( + resource_group_name=resource_group.name, + managed_instance_name="str", + target_db_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.database_migrations_sql_mi.begin_create_or_update( + resource_group_name=resource_group.name, + managed_instance_name="str", + target_db_name="str", + parameters={ + "id": "str", + "name": "str", + "properties": { + "kind": "SqlMi", + "backupConfiguration": { + "sourceLocation": { + "azureBlob": { + "accountKey": "str", + "blobContainerName": "str", + "storageAccountResourceId": "str", + }, + "fileShare": {"password": "str", "path": "str", "username": "str"}, + "fileStorageType": "str", + }, + "targetLocation": {"accountKey": "str", "storageAccountResourceId": "str"}, + }, + "endedOn": "2020-02-20 00:00:00", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "migrationStatusDetails": { + "activeBackupSets": [ + { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + } + ], + "blobContainerName": "str", + "completeRestoreErrorMessage": "str", + "currentRestoringFilename": "str", + "fileUploadBlockingErrors": ["str"], + "fullBackupSetInfo": { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + 
"copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + }, + "invalidFiles": ["str"], + "isFullBackupRestored": bool, + "lastRestoredBackupSetInfo": { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + }, + "lastRestoredFilename": "str", + "migrationState": "str", + "pendingLogBackupsCount": 0, + "restoreBlockingReason": "str", + }, + "offlineConfiguration": {"lastBackupName": "str", "offline": bool}, + "provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceDatabaseName": "str", + "sourceServerName": "str", + "sourceSqlConnection": { + "authentication": "str", + "dataSource": "str", + "encryptConnection": bool, + "password": "str", + "trustServerCertificate": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "targetDatabaseCollation": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_cancel(self, resource_group): + response = self.client.database_migrations_sql_mi.begin_cancel( + resource_group_name=resource_group.name, + managed_instance_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_cutover(self, resource_group): + response = self.client.database_migrations_sql_mi.begin_cutover( + resource_group_name=resource_group.name, + managed_instance_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations_async.py new file mode 100644 index 000000000000..e0c7e6e1b8a3 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_mi_operations_async.py @@ -0,0 +1,215 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsSqlMiOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.database_migrations_sql_mi.get( + resource_group_name=resource_group.name, + managed_instance_name="str", + target_db_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.database_migrations_sql_mi.begin_create_or_update( + resource_group_name=resource_group.name, + managed_instance_name="str", + target_db_name="str", + parameters={ + "id": "str", + "name": "str", + "properties": { + "kind": "SqlMi", + "backupConfiguration": { + "sourceLocation": { + "azureBlob": { + "accountKey": "str", + "blobContainerName": "str", + "storageAccountResourceId": "str", + }, + "fileShare": {"password": "str", "path": "str", "username": "str"}, + "fileStorageType": "str", + }, + "targetLocation": {"accountKey": "str", "storageAccountResourceId": "str"}, + }, + "endedOn": "2020-02-20 00:00:00", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "migrationStatusDetails": { + "activeBackupSets": [ + { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + } + ], + "blobContainerName": "str", + "completeRestoreErrorMessage": "str", + "currentRestoringFilename": "str", + "fileUploadBlockingErrors": ["str"], + "fullBackupSetInfo": { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + 
{ + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + }, + "invalidFiles": ["str"], + "isFullBackupRestored": bool, + "lastRestoredBackupSetInfo": { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + }, + "lastRestoredFilename": "str", + "migrationState": "str", + "pendingLogBackupsCount": 0, + "restoreBlockingReason": "str", + }, + "offlineConfiguration": {"lastBackupName": "str", "offline": bool}, + "provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceDatabaseName": "str", + "sourceServerName": "str", + "sourceSqlConnection": { + "authentication": "str", + "dataSource": "str", + "encryptConnection": bool, + "password": "str", + "trustServerCertificate": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "targetDatabaseCollation": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_cancel(self, resource_group): + response = await ( + await self.client.database_migrations_sql_mi.begin_cancel( + resource_group_name=resource_group.name, + managed_instance_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_cutover(self, resource_group): + response = await ( + await self.client.database_migrations_sql_mi.begin_cutover( + resource_group_name=resource_group.name, + managed_instance_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations.py new file mode 100644 index 000000000000..8cc6b21d943b --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations.py @@ -0,0 +1,208 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsSqlVmOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.database_migrations_sql_vm.get( + resource_group_name=resource_group.name, + sql_virtual_machine_name="str", + target_db_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.database_migrations_sql_vm.begin_create_or_update( + resource_group_name=resource_group.name, + sql_virtual_machine_name="str", + target_db_name="str", + parameters={ + "id": "str", + "name": "str", + "properties": { + "kind": "SqlVm", + "backupConfiguration": { + "sourceLocation": { + "azureBlob": { + "accountKey": "str", + "blobContainerName": "str", + "storageAccountResourceId": "str", + }, + "fileShare": {"password": "str", "path": "str", "username": "str"}, + "fileStorageType": "str", + }, + "targetLocation": {"accountKey": "str", "storageAccountResourceId": "str"}, + }, + "endedOn": "2020-02-20 00:00:00", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "migrationStatusDetails": { + "activeBackupSets": [ + { + "backupFinishDate": 
"2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + } + ], + "blobContainerName": "str", + "completeRestoreErrorMessage": "str", + "currentRestoringFilename": "str", + "fileUploadBlockingErrors": ["str"], + "fullBackupSetInfo": { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + }, + "invalidFiles": ["str"], + "isFullBackupRestored": bool, + "lastRestoredBackupSetInfo": { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + }, + "lastRestoredFilename": "str", + "migrationState": "str", + "pendingLogBackupsCount": 0, + "restoreBlockingReason": "str", + }, + "offlineConfiguration": {"lastBackupName": "str", "offline": bool}, + "provisioningError": "str", + "provisioningState": "str", + 
"scope": "str", + "sourceDatabaseName": "str", + "sourceServerName": "str", + "sourceSqlConnection": { + "authentication": "str", + "dataSource": "str", + "encryptConnection": bool, + "password": "str", + "trustServerCertificate": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "targetDatabaseCollation": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_cancel(self, resource_group): + response = self.client.database_migrations_sql_vm.begin_cancel( + resource_group_name=resource_group.name, + sql_virtual_machine_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_cutover(self, resource_group): + response = self.client.database_migrations_sql_vm.begin_cutover( + resource_group_name=resource_group.name, + sql_virtual_machine_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations_async.py new file mode 100644 index 000000000000..f733a38620bf --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_database_migrations_sql_vm_operations_async.py @@ -0,0 +1,215 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementDatabaseMigrationsSqlVmOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.database_migrations_sql_vm.get( + resource_group_name=resource_group.name, + sql_virtual_machine_name="str", + target_db_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.database_migrations_sql_vm.begin_create_or_update( + resource_group_name=resource_group.name, + sql_virtual_machine_name="str", + target_db_name="str", + parameters={ + "id": "str", + "name": "str", + "properties": { + "kind": "SqlVm", + "backupConfiguration": { + "sourceLocation": { + "azureBlob": { + "accountKey": "str", + "blobContainerName": "str", + "storageAccountResourceId": "str", + }, + "fileShare": {"password": "str", "path": "str", "username": "str"}, + "fileStorageType": "str", + }, + "targetLocation": {"accountKey": "str", "storageAccountResourceId": "str"}, + }, + "endedOn": "2020-02-20 00:00:00", + "migrationFailureError": {"code": "str", "message": "str"}, + "migrationOperationId": "str", + "migrationService": "str", + "migrationStatus": "str", + "migrationStatusDetails": { + "activeBackupSets": [ + { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + } + ], + "blobContainerName": "str", + "completeRestoreErrorMessage": "str", + "currentRestoringFilename": "str", + "fileUploadBlockingErrors": ["str"], + "fullBackupSetInfo": { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": 
[ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + }, + "invalidFiles": ["str"], + "isFullBackupRestored": bool, + "lastRestoredBackupSetInfo": { + "backupFinishDate": "2020-02-20 00:00:00", + "backupSetId": "str", + "backupStartDate": "2020-02-20 00:00:00", + "backupType": "str", + "familyCount": 0, + "firstLSN": "str", + "hasBackupChecksums": bool, + "ignoreReasons": ["str"], + "isBackupRestored": bool, + "lastLSN": "str", + "listOfBackupFiles": [ + { + "copyDuration": 0, + "copyThroughput": 0.0, + "dataRead": 0, + "dataWritten": 0, + "familySequenceNumber": 0, + "fileName": "str", + "status": "str", + "totalSize": 0, + } + ], + }, + "lastRestoredFilename": "str", + "migrationState": "str", + "pendingLogBackupsCount": 0, + "restoreBlockingReason": "str", + }, + "offlineConfiguration": {"lastBackupName": "str", "offline": bool}, + "provisioningError": "str", + "provisioningState": "str", + "scope": "str", + "sourceDatabaseName": "str", + "sourceServerName": "str", + "sourceSqlConnection": { + "authentication": "str", + "dataSource": "str", + "encryptConnection": bool, + "password": "str", + "trustServerCertificate": bool, + "userName": "str", + }, + "startedOn": "2020-02-20 00:00:00", + "targetDatabaseCollation": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_cancel(self, resource_group): + response = await ( + await self.client.database_migrations_sql_vm.begin_cancel( + resource_group_name=resource_group.name, + sql_virtual_machine_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_cutover(self, resource_group): + response = await ( + await self.client.database_migrations_sql_vm.begin_cutover( + resource_group_name=resource_group.name, + sql_virtual_machine_name="str", + target_db_name="str", + parameters={"migrationOperationId": "str"}, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations.py new file mode 100644 index 000000000000..da95da36efd4 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations.py @@ -0,0 +1,158 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementFilesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.files.list( + group_name="str", + service_name="str", + project_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.files.get( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.files.create_or_update( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": { + "extension": "str", + "filePath": "str", + "lastModified": "2020-02-20 00:00:00", + "mediaType": "str", + "size": 0, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.files.delete( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.files.update( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": { + "extension": "str", + "filePath": "str", + "lastModified": "2020-02-20 00:00:00", + "mediaType": "str", + "size": 0, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_read(self, resource_group): + response = self.client.files.read( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_read_write(self, resource_group): + response = self.client.files.read_write( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations_async.py new file mode 100644 index 000000000000..d043565e199b --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_files_operations_async.py @@ -0,0 +1,159 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementFilesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.files.list( + group_name="str", + service_name="str", + project_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.files.get( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.files.create_or_update( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": { + "extension": "str", + "filePath": "str", + "lastModified": "2020-02-20 00:00:00", + "mediaType": "str", + "size": 0, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.files.delete( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.files.update( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": { + "extension": "str", + "filePath": "str", + "lastModified": "2020-02-20 00:00:00", + "mediaType": "str", + "size": 0, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_read(self, resource_group): + response = await self.client.files.read( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_read_write(self, resource_group): + response = await self.client.files.read_write( + group_name="str", + service_name="str", + project_name="str", + file_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations.py new file mode 100644 index 000000000000..c717f8cb3ef6 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations.py @@ -0,0 +1,118 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementMigrationServicesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.migration_services.get( + resource_group_name=resource_group.name, + migration_service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.migration_services.begin_create_or_update( + resource_group_name=resource_group.name, + migration_service_name="str", + parameters={ + "location": "str", + "id": "str", + "integrationRuntimeState": "str", + "name": "str", + "provisioningState": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.migration_services.begin_delete( + resource_group_name=resource_group.name, + migration_service_name="str", + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.migration_services.begin_update( + resource_group_name=resource_group.name, + migration_service_name="str", + parameters={"tags": {"str": "str"}}, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.migration_services.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_subscription(self, resource_group): + response = self.client.migration_services.list_by_subscription( + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_migrations(self, resource_group): + response = self.client.migration_services.list_migrations( + resource_group_name=resource_group.name, + migration_service_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations_async.py new file mode 100644 index 000000000000..c272f7e9579a --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_migration_services_operations_async.py @@ -0,0 +1,125 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementMigrationServicesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.migration_services.get( + resource_group_name=resource_group.name, + migration_service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.migration_services.begin_create_or_update( + resource_group_name=resource_group.name, + migration_service_name="str", + parameters={ + "location": "str", + "id": "str", + "integrationRuntimeState": "str", + "name": "str", + "provisioningState": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.migration_services.begin_delete( + resource_group_name=resource_group.name, + migration_service_name="str", + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.migration_services.begin_update( + resource_group_name=resource_group.name, + migration_service_name="str", + parameters={"tags": {"str": "str"}}, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.migration_services.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_subscription(self, resource_group): + response = self.client.migration_services.list_by_subscription( + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_migrations(self, resource_group): + response = self.client.migration_services.list_migrations( + resource_group_name=resource_group.name, + migration_service_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations.py new file mode 100644 index 000000000000..e31f44188643 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations_async.py new file mode 100644 index 000000000000..089c0b21a7a2 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_operations_async.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations.py new file mode 100644 index 000000000000..c9e53994b71d --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations.py @@ -0,0 +1,141 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementProjectsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.projects.list( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.projects.create_or_update( + group_name="str", + service_name="str", + project_name="str", + parameters={ + "azureAuthenticationInfo": { + "appKey": "str", + "applicationId": "str", + "ignoreAzurePermissions": bool, + "tenantId": "str", + }, + "creationTime": "2020-02-20 00:00:00", + "databasesInfo": [{"sourceDatabaseName": "str"}], + "etag": "str", + "id": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "sourceConnectionInfo": "connection_info", + "sourcePlatform": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "targetConnectionInfo": "connection_info", + "targetPlatform": "str", + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by 
yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.projects.get( + group_name="str", + service_name="str", + project_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.projects.delete( + group_name="str", + service_name="str", + project_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.projects.update( + group_name="str", + service_name="str", + project_name="str", + parameters={ + "azureAuthenticationInfo": { + "appKey": "str", + "applicationId": "str", + "ignoreAzurePermissions": bool, + "tenantId": "str", + }, + "creationTime": "2020-02-20 00:00:00", + "databasesInfo": [{"sourceDatabaseName": "str"}], + "etag": "str", + "id": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "sourceConnectionInfo": "connection_info", + "sourcePlatform": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "targetConnectionInfo": "connection_info", + "targetPlatform": "str", + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations_async.py new file mode 100644 index 000000000000..6992e96fd1b4 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_projects_operations_async.py @@ -0,0 +1,142 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementProjectsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.projects.list( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.projects.create_or_update( + group_name="str", + service_name="str", + project_name="str", + parameters={ + "azureAuthenticationInfo": { + "appKey": "str", + "applicationId": "str", + "ignoreAzurePermissions": bool, + "tenantId": "str", + }, + "creationTime": "2020-02-20 00:00:00", + "databasesInfo": [{"sourceDatabaseName": "str"}], + "etag": "str", + "id": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "sourceConnectionInfo": "connection_info", + "sourcePlatform": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "targetConnectionInfo": "connection_info", + "targetPlatform": "str", + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.projects.get( + group_name="str", + service_name="str", + project_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.projects.delete( + group_name="str", + service_name="str", + project_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.projects.update( + group_name="str", + service_name="str", + project_name="str", + parameters={ + "azureAuthenticationInfo": { + "appKey": "str", + "applicationId": "str", + "ignoreAzurePermissions": bool, + "tenantId": "str", + }, + "creationTime": "2020-02-20 00:00:00", + "databasesInfo": [{"sourceDatabaseName": "str"}], + "etag": "str", + "id": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "sourceConnectionInfo": "connection_info", + "sourcePlatform": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "targetConnectionInfo": "connection_info", + "targetPlatform": "str", + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations.py new file mode 100644 index 000000000000..30d6268a1632 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementResourceSkusOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_skus(self, resource_group): + response = self.client.resource_skus.list_skus( + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations_async.py new file mode 100644 index 000000000000..2b269c3496c4 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_resource_skus_operations_async.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementResourceSkusOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_skus(self, resource_group): + response = self.client.resource_skus.list_skus( + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations.py new file mode 100644 index 000000000000..e9ebd4ce6643 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementServiceTasksOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.service_tasks.list( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.service_tasks.create_or_update( + group_name="str", + service_name="str", + task_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": "project_task_properties", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.service_tasks.get( + group_name="str", + service_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.service_tasks.delete( + group_name="str", + service_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.service_tasks.update( + group_name="str", + service_name="str", + task_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": "project_task_properties", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_cancel(self, resource_group): + response = self.client.service_tasks.cancel( + group_name="str", + service_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations_async.py new file mode 100644 index 000000000000..05705d88f17c --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_service_tasks_operations_async.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementServiceTasksOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.service_tasks.list( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.service_tasks.create_or_update( + group_name="str", + service_name="str", + task_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": "project_task_properties", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.service_tasks.get( + group_name="str", + service_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.service_tasks.delete( + group_name="str", + service_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.service_tasks.update( + group_name="str", + service_name="str", + task_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": "project_task_properties", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_cancel(self, resource_group): + response = await self.client.service_tasks.cancel( + group_name="str", + service_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations.py new file mode 100644 index 000000000000..33dabce78672 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations.py @@ -0,0 +1,209 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementServicesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.services.begin_create_or_update( + group_name="str", + service_name="str", + parameters={ + "autoStopDelay": "str", + "deleteResourcesOnStop": bool, + "etag": "str", + "id": "str", + "kind": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "publicKey": "str", + "sku": {"capacity": 0, "family": "str", "name": "str", "size": "str", "tier": "str"}, + "systemData": { + "createdAt": 
"2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + "virtualNicId": "str", + "virtualSubnetId": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.services.get( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.services.begin_delete( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.services.begin_update( + group_name="str", + service_name="str", + parameters={ + "autoStopDelay": "str", + "deleteResourcesOnStop": bool, + "etag": "str", + "id": "str", + "kind": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "publicKey": "str", + "sku": {"capacity": 0, "family": "str", "name": "str", "size": "str", "tier": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + "virtualNicId": "str", + "virtualSubnetId": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_check_status(self, resource_group): + response = self.client.services.check_status( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_start(self, resource_group): + response = self.client.services.begin_start( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_stop(self, resource_group): + response = self.client.services.begin_stop( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_skus(self, resource_group): + response = self.client.services.list_skus( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_check_children_name_availability(self, resource_group): + response = self.client.services.check_children_name_availability( + group_name="str", + service_name="str", + parameters={"name": "str", "type": "str"}, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.services.list_by_resource_group( + group_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.services.list( + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_check_name_availability(self, resource_group): + response = self.client.services.check_name_availability( + location="str", + parameters={"name": "str", "type": "str"}, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations_async.py new file mode 100644 index 000000000000..c769c9240f7c --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_services_operations_async.py @@ -0,0 +1,220 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementServicesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.services.begin_create_or_update( + group_name="str", + service_name="str", + parameters={ + "autoStopDelay": "str", + "deleteResourcesOnStop": bool, + "etag": "str", + "id": "str", + "kind": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "publicKey": "str", + "sku": {"capacity": 0, "family": "str", "name": "str", "size": "str", "tier": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + "virtualNicId": "str", + "virtualSubnetId": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.services.get( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.services.begin_delete( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.services.begin_update( + group_name="str", + service_name="str", + parameters={ + "autoStopDelay": "str", + "deleteResourcesOnStop": bool, + "etag": "str", + "id": "str", + "kind": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "publicKey": "str", + "sku": {"capacity": 0, "family": "str", "name": "str", "size": "str", "tier": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + "virtualNicId": "str", + "virtualSubnetId": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_check_status(self, resource_group): + response = await self.client.services.check_status( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_start(self, resource_group): + response = await ( + await self.client.services.begin_start( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_stop(self, resource_group): + response = await ( + await self.client.services.begin_stop( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_skus(self, resource_group): + response = self.client.services.list_skus( + group_name="str", + service_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_check_children_name_availability(self, resource_group): + response = await self.client.services.check_children_name_availability( + group_name="str", + service_name="str", + parameters={"name": "str", "type": "str"}, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.services.list_by_resource_group( + group_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.services.list( + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_check_name_availability(self, resource_group): + response = await self.client.services.check_name_availability( + location="str", + parameters={"name": "str", "type": "str"}, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations.py new file mode 100644 index 000000000000..ec84cf851dcc --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations.py @@ -0,0 +1,168 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementSqlMigrationServicesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.sql_migration_services.get( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.sql_migration_services.begin_create_or_update( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + parameters={ + "id": "str", + "integrationRuntimeState": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.sql_migration_services.begin_delete( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.sql_migration_services.begin_update( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + parameters={"tags": {"str": "str"}}, + api_version="2023-07-15-preview", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.sql_migration_services.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_auth_keys(self, resource_group): + response = self.client.sql_migration_services.list_auth_keys( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_regenerate_auth_keys(self, resource_group): + response = self.client.sql_migration_services.regenerate_auth_keys( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + parameters={"authKey1": "str", "authKey2": "str", "keyName": "str"}, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete_node(self, resource_group): + response = self.client.sql_migration_services.delete_node( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + parameters={"integrationRuntimeName": "str", "nodeName": "str"}, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_migrations(self, resource_group): + response = self.client.sql_migration_services.list_migrations( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_monitoring_data(self, resource_group): + response = self.client.sql_migration_services.list_monitoring_data( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_subscription(self, resource_group): + response = self.client.sql_migration_services.list_by_subscription( + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations_async.py new file mode 100644 index 000000000000..803b0207362b --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_sql_migration_services_operations_async.py @@ -0,0 +1,175 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementSqlMigrationServicesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.sql_migration_services.get( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.sql_migration_services.begin_create_or_update( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + parameters={ + "id": "str", + "integrationRuntimeState": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.sql_migration_services.begin_delete( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.sql_migration_services.begin_update( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + parameters={"tags": {"str": "str"}}, + api_version="2023-07-15-preview", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.sql_migration_services.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_auth_keys(self, resource_group): + response = await self.client.sql_migration_services.list_auth_keys( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_regenerate_auth_keys(self, resource_group): + response = await self.client.sql_migration_services.regenerate_auth_keys( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + parameters={"authKey1": "str", "authKey2": "str", "keyName": "str"}, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete_node(self, resource_group): + response = await self.client.sql_migration_services.delete_node( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + parameters={"integrationRuntimeName": "str", "nodeName": "str"}, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_migrations(self, resource_group): + response = self.client.sql_migration_services.list_migrations( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_monitoring_data(self, resource_group): + response = await self.client.sql_migration_services.list_monitoring_data( + resource_group_name=resource_group.name, + sql_migration_service_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_subscription(self, resource_group): + response = self.client.sql_migration_services.list_by_subscription( + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations.py new file mode 100644 index 000000000000..4b80b116df87 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations.py @@ -0,0 +1,164 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementTasksOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.tasks.list( + group_name="str", + service_name="str", + project_name="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.tasks.create_or_update( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": "project_task_properties", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.tasks.get( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.tasks.delete( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.tasks.update( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": "project_task_properties", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_cancel(self, resource_group): + response = self.client.tasks.cancel( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_command(self, resource_group): + response = self.client.tasks.command( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + parameters={ + "commandType": "Migrate.SqlServer.AzureDbSqlMi.Complete", + "errors": [{"code": "str", "details": [...], "message": "str"}], + "input": {"sourceDatabaseName": "str"}, + "output": { + "errors": [ + { + "actionableMessage": "str", + "filePath": "str", + "hResult": 0, + "lineNumber": "str", + "message": "str", + "stackTrace": "str", + } + ] + }, + "state": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations_async.py new file mode 100644 index 000000000000..413792a64727 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_tasks_operations_async.py @@ -0,0 +1,165 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementTasksOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.tasks.list( + group_name="str", + service_name="str", + project_name="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.tasks.create_or_update( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": "project_task_properties", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.tasks.get( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.tasks.delete( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.tasks.update( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + parameters={ + "etag": "str", + "id": "str", + "name": "str", + "properties": "project_task_properties", + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_cancel(self, resource_group): + response = await self.client.tasks.cancel( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_command(self, resource_group): + response = await self.client.tasks.command( + group_name="str", + service_name="str", + project_name="str", + task_name="str", + parameters={ + "commandType": "Migrate.SqlServer.AzureDbSqlMi.Complete", + "errors": [{"code": "str", "details": [...], "message": "str"}], + "input": {"sourceDatabaseName": "str"}, + "output": { + "errors": [ + { + "actionableMessage": "str", + "filePath": "str", + "hResult": 0, + "lineNumber": "str", + "message": "str", + "stackTrace": "str", + } + ] + }, + "state": "str", + }, + api_version="2023-07-15-preview", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations.py new file mode 100644 index 000000000000..693a9beaad51 --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementUsagesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.usages.list( + location="str", + api_version="2023-07-15-preview", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations_async.py b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations_async.py new file mode 100644 index 000000000000..a83141dd310a --- /dev/null +++ b/sdk/datamigration/azure-mgmt-datamigration/generated_tests/test_data_migration_management_usages_operations_async.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datamigration.aio import DataMigrationManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataMigrationManagementUsagesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataMigrationManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.usages.list( + location="str", + api_version="2023-07-15-preview", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datamigration/azure-mgmt-datamigration/setup.py b/sdk/datamigration/azure-mgmt-datamigration/setup.py index a08d35aa86a5..827dcf62a2f9 100644 --- a/sdk/datamigration/azure-mgmt-datamigration/setup.py +++ b/sdk/datamigration/azure-mgmt-datamigration/setup.py @@ -1,10 +1,10 @@ #!/usr/bin/env python -#------------------------------------------------------------------------- +# ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. 
-#-------------------------------------------------------------------------- +# -------------------------------------------------------------------------- import re import os.path @@ -16,64 +16,68 @@ PACKAGE_PPRINT_NAME = "Data Migration" # a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace('-', '/') +package_folder_path = PACKAGE_NAME.replace("-", "/") # a-b-c => a.b.c -namespace_name = PACKAGE_NAME.replace('-', '.') +namespace_name = PACKAGE_NAME.replace("-", ".") # Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, 'version.py') - if os.path.exists(os.path.join(package_folder_path, 'version.py')) - else os.path.join(package_folder_path, '_version.py'), 'r') as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', - fd.read(), re.MULTILINE).group(1) +with open( + os.path.join(package_folder_path, "version.py") + if os.path.exists(os.path.join(package_folder_path, "version.py")) + else os.path.join(package_folder_path, "_version.py"), + "r", +) as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) if not version: - raise RuntimeError('Cannot find version information') + raise RuntimeError("Cannot find version information") -with open('README.md', encoding='utf-8') as f: +with open("README.md", encoding="utf-8") as f: readme = f.read() -with open('CHANGELOG.md', encoding='utf-8') as f: +with open("CHANGELOG.md", encoding="utf-8") as f: changelog = f.read() setup( name=PACKAGE_NAME, version=version, - description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), - long_description=readme + '\n\n' + changelog, - long_description_content_type='text/markdown', - license='MIT License', - author='Microsoft Corporation', - author_email='azpysdkhelp@microsoft.com', - url='https://github.com/Azure/azure-sdk-for-python', + description="Microsoft Azure {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + long_description=readme + "\n\n" + 
changelog, + long_description_content_type="text/markdown", + license="MIT License", + author="Microsoft Corporation", + author_email="azpysdkhelp@microsoft.com", + url="https://github.com/Azure/azure-sdk-for-python", keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'License :: OSI Approved :: MIT License', + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "License :: OSI Approved :: MIT License", ], zip_safe=False, - packages=find_packages(exclude=[ - 'tests', - # Exclude packages that will be covered by PEP420 or nspkg - 'azure', - 'azure.mgmt', - ]), + packages=find_packages( + exclude=[ + "tests", + # Exclude packages that will be covered by PEP420 or nspkg + "azure", + "azure.mgmt", + ] + ), include_package_data=True, package_data={ - 'pytyped': ['py.typed'], + "pytyped": ["py.typed"], }, install_requires=[ - "msrest>=0.7.1", - "azure-common~=1.1", - "azure-mgmt-core>=1.3.2,<2.0.0", - "typing-extensions>=4.3.0; python_version<'3.8.0'", + "isodate>=0.6.1", + "typing-extensions>=4.6.0", + "azure-common>=1.1", + "azure-mgmt-core>=1.3.2", ], - python_requires=">=3.7" + python_requires=">=3.8", )