diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/CHANGELOG.md b/sdk/resourceconnector/azure-mgmt-resourceconnector/CHANGELOG.md
index 31adc0ee12a6..2baee9685a6b 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/CHANGELOG.md
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/CHANGELOG.md
@@ -1,5 +1,11 @@
# Release History
+## 1.1.0 (2025-04-11)
+
+### Features Added
+
+ - Method `AppliancesOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, resource_name: str, parameters: IO[bytes], content_type: str)`
+
## 1.0.0 (2023-08-18)
### Features Added
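
As a rough illustration of the 1.1.0 entry above, the new `begin_create_or_update` overload accepts the appliance payload as a JSON byte stream plus an explicit `content_type`. A minimal sketch; the resource names and the `distro` value are placeholders, not taken from this change:

```python
import io

from azure.identity import DefaultAzureCredential
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient

client = ResourceConnectorMgmtClient(DefaultAzureCredential(), "<subscription-id>")

# New overload: pass the body as IO[bytes] instead of an Appliance model.
payload = io.BytesIO(b'{"location": "eastus", "properties": {"distro": "AKSEdge"}}')
poller = client.appliances.begin_create_or_update(
    resource_group_name="<resource-group>",
    resource_name="<appliance-name>",
    parameters=payload,
    content_type="application/json",
)
appliance = poller.result()
```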
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/README.md b/sdk/resourceconnector/azure-mgmt-resourceconnector/README.md
index 62bcc0858b1f..4f3e55ad0f1d 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/README.md
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/README.md
@@ -1,7 +1,7 @@
# Microsoft Azure SDK for Python
This is the Microsoft Azure Resource Connector Management Client Library.
-This package has been tested with Python 3.7+.
+This package has been tested with Python 3.8+.
For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all).
## _Disclaimer_
@@ -12,7 +12,7 @@ _Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For
### Prerequisites
-- Python 3.7+ is required to use this package.
+- Python 3.8+ is required to use this package.
- [Azure subscription](https://azure.microsoft.com/free/)
### Install the package
@@ -24,7 +24,7 @@ pip install azure-identity
### Authentication
-By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configure of following environment variables.
+By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configuration of the following environment variables.
- `AZURE_CLIENT_ID` for Azure client ID.
- `AZURE_TENANT_ID` for Azure tenant ID.
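
A minimal sketch of the resulting authentication flow, assuming the environment variables listed here (and a subscription id in `AZURE_SUBSCRIPTION_ID`) are set so that `DefaultAzureCredential` can pick them up:

```python
import os

from azure.identity import DefaultAzureCredential
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient

client = ResourceConnectorMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id=os.environ["AZURE_SUBSCRIPTION_ID"],
)
```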
@@ -59,6 +59,3 @@ Code samples for this package can be found at:
If you encounter any bugs or have suggestions, please file an issue in the
[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
section of the project.
-
-
-
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/_meta.json b/sdk/resourceconnector/azure-mgmt-resourceconnector/_meta.json
index 7f85414b46cf..4a4006eecc3e 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/_meta.json
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/_meta.json
@@ -1,11 +1,11 @@
{
- "commit": "990705d56b53a74758ce7018d1dad684bc5cf592",
+ "commit": "21876065ad90da6c3adc846dc679633241dd581f",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
- "autorest": "3.9.7",
+ "autorest": "3.10.2",
"use": [
- "@autorest/python@6.7.1",
- "@autorest/modelerfour@4.26.2"
+ "@autorest/python@6.27.4",
+ "@autorest/modelerfour@4.27.0"
],
- "autorest_command": "autorest specification/resourceconnector/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.7.1 --use=@autorest/modelerfour@4.26.2 --version=3.9.7 --version-tolerant=False",
+ "autorest_command": "autorest specification/resourceconnector/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.27.4 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
"readme": "specification/resourceconnector/resource-manager/readme.md"
}
\ No newline at end of file
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/__init__.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/__init__.py
index 417696cd56e4..13f2ad2332b6 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/__init__.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._resource_connector_mgmt_client import ResourceConnectorMgmtClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._resource_connector_mgmt_client import ResourceConnectorMgmtClient # type: ignore
from ._version import VERSION
__version__ = VERSION
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -21,6 +27,6 @@
__all__ = [
"ResourceConnectorMgmtClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_configuration.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_configuration.py
index e48d55df7b6d..459ed3ed575a 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_configuration.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_configuration.py
@@ -8,18 +8,16 @@
from typing import Any, TYPE_CHECKING
-from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class ResourceConnectorMgmtClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+class ResourceConnectorMgmtClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for ResourceConnectorMgmtClient.
Note that all parameters used to create this instance are saved as instance
@@ -35,7 +33,6 @@ class ResourceConnectorMgmtClientConfiguration(Configuration): # pylint: disabl
"""
def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
- super(ResourceConnectorMgmtClientConfiguration, self).__init__(**kwargs)
api_version: str = kwargs.pop("api_version", "2022-10-27")
if credential is None:
@@ -48,6 +45,7 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs
self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-resourceconnector/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
self._configure(**kwargs)
def _configure(self, **kwargs: Any) -> None:
@@ -56,9 +54,9 @@ def _configure(self, **kwargs: Any) -> None:
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = ARMChallengeAuthenticationPolicy(
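
The regenerated configuration now reads `polling_interval` from `**kwargs` and still builds the standard pipeline policies from those same kwargs, so tuning stays keyword-driven. A minimal sketch with illustrative values; `retry_total` is a standard `RetryPolicy` keyword, not something specific to this package:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient

client = ResourceConnectorMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    polling_interval=10,  # consumed by ResourceConnectorMgmtClientConfiguration
    retry_total=5,        # forwarded to policies.RetryPolicy via **kwargs
)
```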
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_resource_connector_mgmt_client.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_resource_connector_mgmt_client.py
index 9acd949d88d3..938981798752 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_resource_connector_mgmt_client.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_resource_connector_mgmt_client.py
@@ -8,9 +8,12 @@
from copy import deepcopy
from typing import Any, TYPE_CHECKING
+from typing_extensions import Self
+from azure.core.pipeline import policies
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
+from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy
from . import models as _models
from ._configuration import ResourceConnectorMgmtClientConfiguration
@@ -18,11 +21,10 @@
from .operations import AppliancesOperations
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class ResourceConnectorMgmtClient: # pylint: disable=client-accepts-api-version-keyword
+class ResourceConnectorMgmtClient:
"""The appliances Rest API spec.
:ivar appliances: AppliancesOperations operations
@@ -50,7 +52,25 @@ def __init__(
self._config = ResourceConnectorMgmtClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
- self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ ARMAutoResourceProviderRegistrationPolicy(),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, policies=_policies, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
@@ -58,7 +78,7 @@ def __init__(
self._serialize.client_side_validation = False
self.appliances = AppliancesOperations(self._client, self._config, self._serialize, self._deserialize)
- def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
+ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
@@ -78,12 +98,12 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
- return self._client.send_request(request_copy, **kwargs)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
def close(self) -> None:
self._client.close()
- def __enter__(self) -> "ResourceConnectorMgmtClient":
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
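
A minimal usage sketch mirroring the `_send_request` docstring in this file; the operations path and api-version are assumptions for illustration, and `_send_request` is the private helper shown above:

```python
from azure.core.rest import HttpRequest
from azure.identity import DefaultAzureCredential
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient

with ResourceConnectorMgmtClient(DefaultAzureCredential(), "<subscription-id>") as client:
    request = HttpRequest(
        "GET", "/providers/Microsoft.ResourceConnector/operations?api-version=2022-10-27"
    )
    response = client._send_request(request)
    print(response.status_code)
```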
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_serialization.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_serialization.py
index 4bae2292227b..b24ab2885450 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_serialization.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -63,8 +62,8 @@
import isodate # type: ignore
-from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback
-from azure.core.serialization import NULL as AzureCoreNull
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -124,7 +125,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
pass
return ET.fromstring(data_as_str) # nosec
- except ET.ParseError:
+ except ET.ParseError as err:
# It might be because the server has an issue, and returned JSON with
# content-type XML....
# So let's try a JSON load, and if it's still broken
@@ -143,7 +144,9 @@ def _json_attemp(data):
# The function hack is because Py2.7 messes up with exception
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
- raise_with_traceback(DeserializationError, "XML is invalid")
+ raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
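
A quick sketch of the content-type dispatch above, including the new `text/` branch; it assumes the module-level `RawDeserializer` defined in this file:

```python
from azure.mgmt.resourceconnector._serialization import RawDeserializer

# JSON content types go through json.loads; plain text is now returned as-is.
assert RawDeserializer.deserialize_from_text('{"a": 1}', "application/json") == {"a": 1}
assert RawDeserializer.deserialize_from_text("plain body", "text/plain") == "plain body"
```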
@@ -153,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -170,13 +178,6 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
return None
-try:
- basestring # type: ignore
- unicode_str = unicode # type: ignore
-except NameError:
- basestring = str
- unicode_str = str
-
_LOGGER = logging.getLogger(__name__)
try:
@@ -189,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -211,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -240,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
- self.additional_properties: Dict[str, Any] = {}
- for k in kwargs:
+ self.additional_properties: Optional[Dict[str, Any]] = {}
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -305,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -331,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -340,7 +379,7 @@ def _create_xml_node(cls):
return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
- """Return the JSON that would be sent to azure from this model.
+ """Return the JSON that would be sent to server from this model.
This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
@@ -351,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -385,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
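
For orientation, a minimal sketch of the two dict shapes `as_dict` can produce. It assumes the generated `Appliance` model (a tracked resource accepting `location`/`tags`) and imports the key transformer from this private module:

```python
from azure.mgmt.resourceconnector._serialization import full_restapi_key_transformer
from azure.mgmt.resourceconnector.models import Appliance

appliance = Appliance(location="eastus", tags={"env": "dev"})
attr_shaped = appliance.as_dict()  # Python attribute names
rest_shaped = appliance.as_dict(key_transformer=full_restapi_key_transformer)  # RestAPI key paths
```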
@@ -400,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -413,9 +457,10 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@classmethod
def from_dict(
@@ -431,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -445,7 +492,7 @@ def from_dict(
if key_extractors is None
else key_extractors
)
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@classmethod
def _flatten_subtype(cls, key, objects):
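
And the inverse direction via `from_dict`, which uses the case-insensitive RestAPI key extractors described above. A minimal sketch, assuming the generated `Appliance` model flattens `properties.distro`:

```python
from azure.mgmt.resourceconnector.models import Appliance

data = {"location": "eastus", "properties": {"distro": "AKSEdge"}}
appliance = Appliance.from_dict(data)
print(appliance.location, appliance.distro)
```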
@@ -453,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -506,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer: # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -545,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -561,17 +614,20 @@ def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None):
"[]": self.serialize_iter,
"{}": self.serialize_dict,
}
- self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {}
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -597,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -638,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -649,7 +708,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
else: # That's a basic type
# Integrate namespace if necessary
local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
- local_node.text = unicode_str(new_attr)
+ local_node.text = str(new_attr)
serialized.append(local_node) # type: ignore
else: # JSON
for k in reversed(keys): # type: ignore
@@ -668,18 +727,18 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
- raise_with_traceback(SerializationError, msg, err)
- else:
- return serialized
+ raise SerializationError(msg) from err
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -708,18 +767,20 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
- raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err)
+ raise SerializationError("Unable to build a model: " + str(err)) from err
return self._serialize(data, data_type, **kwargs)
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -730,30 +791,30 @@ def url(self, name, data, data_type, **kwargs):
if kwargs.get("skip_quote") is True:
output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
- :rtype: str
+ :rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
if data_type.startswith("["):
internal_data_type = data_type[1:-1]
do_quote = not kwargs.get("skip_quote", False)
- return str(self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs))
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
# Not a list, regular serialization
output = self.serialize_data(data, data_type, **kwargs)
@@ -763,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -784,32 +846,31 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
try:
- if data is AzureCoreNull:
+ if data is CoreNull:
return None
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -824,12 +885,11 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
- raise_with_traceback(SerializationError, msg.format(data, data_type), err)
- else:
- return self._serialize(data, **kwargs)
+ raise SerializationError(msg.format(data, data_type)) from err
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -845,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -875,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -886,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -949,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -975,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -983,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -993,7 +1053,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
if obj_type is _long_type:
return self.serialize_long(attr)
- if obj_type is unicode_str:
+ if obj_type is str:
return self.serialize_unicode(attr)
if obj_type is datetime.datetime:
return self.serialize_iso(attr)
@@ -1007,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1038,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1095,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1109,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1145,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1170,19 +1239,20 @@ def serialize_iso(attr, **kwargs):
return date + microseconds + "Z"
except (ValueError, OverflowError) as err:
msg = "Unable to serialize datetime object."
- raise_with_traceback(SerializationError, msg, err)
+ raise SerializationError(msg) from err
except AttributeError as err:
msg = "ISO-8601 object must be valid Datetime object."
- raise_with_traceback(TypeError, msg, err)
+ raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1190,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1209,14 +1279,15 @@ def rest_key_extractor(attr, attr_desc, data):
if working_data is None:
# If at any point while following flatten JSON path see None, it means
# that all properties under are None as well
- # https://github.com/Azure/msrest-for-python/issues/197
return None
key = ".".join(dict_keys[1:])
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1230,7 +1301,6 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
if working_data is None:
# If at any point while following flatten JSON path see None, it means
# that all properties under are None as well
- # https://github.com/Azure/msrest-for-python/issues/197
return None
key = ".".join(dict_keys[1:])
@@ -1238,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1285,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1337,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1360,7 +1441,7 @@ def xml_key_extractor(attr, attr_desc, data):
return children[0]
-class Deserializer(object):
+class Deserializer:
"""Response object model deserializer.
:param dict classes: Class type dictionary for deserializing complex types.
@@ -1369,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1391,7 +1472,7 @@ def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None):
"duration": (isodate.Duration, datetime.timedelta),
"iso-8601": (datetime.datetime),
}
- self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {}
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
self.key_extractors = [rest_key_extractor, xml_key_extractor]
# Additional properties only works if the "rest_key_extractor" is used to
# extract the keys. Making it to work whatever the key extractor is too much
@@ -1409,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1422,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1444,15 +1527,15 @@ def _deserialize(self, target_obj, data):
response, class_name = self._classify_target(target_obj, data)
- if isinstance(response, basestring):
+ if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1481,10 +1564,9 @@ def _deserialize(self, target_obj, data):
d_attrs[attr] = value
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
- raise_with_traceback(DeserializationError, msg, err)
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ raise DeserializationError(msg) from err
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1511,18 +1593,20 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
- if isinstance(target, basestring):
+ if isinstance(target, str):
try:
target = self.dependencies[target]
except KeyError:
return target, target
try:
- target = target._classify(data, self.dependencies)
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1537,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
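
A minimal sketch of how `failsafe_deserialize` behaves, assuming the generated models include the standard `ErrorResponse`/`ErrorDetail` types (this mirrors how the generated operations parse error bodies without raising):

```python
from azure.mgmt.resourceconnector import models as _models
from azure.mgmt.resourceconnector._serialization import Deserializer

client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
deserializer = Deserializer(client_models)

error = deserializer.failsafe_deserialize(
    _models.ErrorResponse, '{"error": {"code": "NotFound"}}', content_type="application/json"
)
# On any deserialization failure this returns None instead of raising.
```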
@@ -1558,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1578,31 +1666,42 @@ def _unpack_content(raw_data, content_type=None):
if hasattr(raw_data, "_content_consumed"):
return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
- if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"):
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
return raw_data
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1611,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1633,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1652,15 +1756,15 @@ def deserialize_data(self, data, data_type):
except (ValueError, TypeError, AttributeError) as err:
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return self._deserialize(obj_type, data)
+ raise DeserializationError(msg) from err
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1677,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1687,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1700,7 +1806,7 @@ def deserialize_object(self, attr, **kwargs):
if isinstance(attr, ET.Element):
# Do no recurse on XML, just return the tree as-is
return attr
- if isinstance(attr, basestring):
+ if isinstance(attr, str):
return self.deserialize_basic(attr, "str")
obj_type = type(attr)
if obj_type in self.basic_types:
@@ -1726,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1738,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1749,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, basestring):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1774,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1787,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1800,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1808,12 +1914,11 @@ def deserialize_enum(data, enum_obj):
data = data.value
if isinstance(data, int):
# Workaround. We might consider remove it in the future.
- # https://github.com/Azure/azure-rest-api-specs/issues/141
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1829,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1841,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1856,22 +1963,24 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
try:
- return decimal.Decimal(attr) # type: ignore
+ return decimal.Decimal(str(attr)) # type: ignore
except decimal.DecimalException as err:
msg = "Invalid decimal {}".format(attr)
- raise_with_traceback(DeserializationError, msg, err)
+ raise DeserializationError(msg) from err
@staticmethod
def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1884,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1893,15 +2003,15 @@ def deserialize_duration(attr):
duration = isodate.parse_duration(attr)
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return duration
+ raise DeserializationError(msg) from err
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1910,13 +2020,14 @@ def deserialize_date(attr):
if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
# This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
- return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
@staticmethod
def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1931,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1945,15 +2057,15 @@ def deserialize_rfc(attr):
date_obj = date_obj.astimezone(tz=TZ_UTC)
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1982,9 +2094,8 @@ def deserialize_iso(attr):
raise OverflowError("Hit max or min date")
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1992,15 +2103,16 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
if isinstance(attr, ET.Element):
attr = int(attr.text) # type: ignore
try:
+ attr = int(attr)
date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
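The serialization changes above swap the Python 2 era `raise_with_traceback` helper for native exception chaining and coerce Unix timestamps through `int()` before parsing. A standalone sketch of the same pattern (`DeserializationError` here is a local stand-in, not the SDK class):

```python
import datetime


class DeserializationError(Exception):
    """Stand-in for the SDK's DeserializationError."""


def deserialize_unix(attr):
    """Deserialize a POSIX timestamp (int, float, or numeric string) to a UTC datetime."""
    try:
        attr = int(attr)  # mirrors the added coercion, so "1712793600" is accepted too
        return datetime.datetime.fromtimestamp(attr, datetime.timezone.utc)
    except ValueError as err:
        # Native exception chaining: the original ValueError survives as __cause__,
        # which is what raise_with_traceback emulated on Python 2.
        raise DeserializationError("Cannot deserialize to unix datetime object.") from err


print(deserialize_unix("1712793600"))        # 2024-04-11 00:00:00+00:00
try:
    deserialize_unix("not-a-timestamp")
except DeserializationError as exc:
    print(type(exc.__cause__).__name__)      # ValueError
```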
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_vendor.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_vendor.py
deleted file mode 100644
index 0dafe0e287ff..000000000000
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_vendor.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from azure.core.pipeline.transport import HttpRequest
-
-
-def _convert_request(request, files=None):
- data = request.content if not files else None
- request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
- if files:
- request.set_formdata_body(files)
- return request
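With the `_convert_request` shim deleted, the generated operations hand `azure.core.rest.HttpRequest` objects straight to the pipeline instead of converting them to the legacy transport type. A rough sketch of the request shape involved (the URL and header values are illustrative only):

```python
from azure.core.rest import HttpRequest

# Roughly what a build_*_request helper produces; the pipeline now consumes
# this object directly, with no _convert_request translation step.
request = HttpRequest(
    method="GET",
    url="/providers/Microsoft.ResourceConnector/operations",
    params={"api-version": "2022-10-27"},
    headers={"Accept": "application/json"},
)
print(request.method, request.url)
```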
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_version.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_version.py
index c47f66669f1b..59deb8c7263b 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_version.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "1.0.0"
+VERSION = "1.1.0"
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/__init__.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/__init__.py
index b75bc89845c2..4ce20cbe56f0 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/__init__.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._resource_connector_mgmt_client import ResourceConnectorMgmtClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._resource_connector_mgmt_client import ResourceConnectorMgmtClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"ResourceConnectorMgmtClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/_configuration.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/_configuration.py
index 5af8b7e6819f..cf4a7ce92d0b 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/_configuration.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/_configuration.py
@@ -8,18 +8,16 @@
from typing import Any, TYPE_CHECKING
-from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class ResourceConnectorMgmtClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+class ResourceConnectorMgmtClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for ResourceConnectorMgmtClient.
Note that all parameters used to create this instance are saved as instance
@@ -35,7 +33,6 @@ class ResourceConnectorMgmtClientConfiguration(Configuration): # pylint: disabl
"""
def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
- super(ResourceConnectorMgmtClientConfiguration, self).__init__(**kwargs)
api_version: str = kwargs.pop("api_version", "2022-10-27")
if credential is None:
@@ -48,6 +45,7 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k
self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-resourceconnector/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
self._configure(**kwargs)
def _configure(self, **kwargs: Any) -> None:
@@ -56,9 +54,9 @@ def _configure(self, **kwargs: Any) -> None:
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(
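Since the configuration class no longer inherits from azure.core's `Configuration` and now records a default `polling_interval`, per-client behaviour is driven purely by keyword arguments. A minimal construction sketch, assuming default Azure credentials are available in the environment (the subscription ID is a placeholder):

```python
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.resourceconnector.aio import ResourceConnectorMgmtClient

client = ResourceConnectorMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    retry_total=5,        # consumed by AsyncRetryPolicy in _configure
    polling_interval=10,  # overrides the new default of 30 shown above
)
```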
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/_resource_connector_mgmt_client.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/_resource_connector_mgmt_client.py
index 99ada5481c7b..ced3e67dab3a 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/_resource_connector_mgmt_client.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/_resource_connector_mgmt_client.py
@@ -8,9 +8,12 @@
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
+from typing_extensions import Self
+from azure.core.pipeline import policies
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
+from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy
from .. import models as _models
from .._serialization import Deserializer, Serializer
@@ -18,11 +21,10 @@
from .operations import AppliancesOperations
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class ResourceConnectorMgmtClient: # pylint: disable=client-accepts-api-version-keyword
+class ResourceConnectorMgmtClient:
"""The appliances Rest API spec.
:ivar appliances: AppliancesOperations operations
@@ -50,7 +52,25 @@ def __init__(
self._config = ResourceConnectorMgmtClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
- self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ AsyncARMAutoResourceProviderRegistrationPolicy(),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, policies=_policies, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
@@ -58,7 +78,9 @@ def __init__(
self._serialize.client_side_validation = False
self.appliances = AppliancesOperations(self._client, self._config, self._serialize, self._deserialize)
- def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
+ def _send_request(
+ self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
+ ) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
@@ -78,12 +100,12 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncH
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
- return self._client.send_request(request_copy, **kwargs)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "ResourceConnectorMgmtClient":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
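Because `__aenter__` is now typed as returning `Self` and the pipeline is assembled from an explicit policy list, the client is most naturally used as an async context manager. A usage sketch, assuming default credentials and a reachable subscription (the ID is a placeholder):

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.resourceconnector.aio import ResourceConnectorMgmtClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with ResourceConnectorMgmtClient(credential, "00000000-0000-0000-0000-000000000000") as client:
            # Pages through Appliance resources in the subscription.
            async for appliance in client.appliances.list_by_subscription():
                print(appliance.name)


asyncio.run(main())
```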
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/operations/__init__.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/operations/__init__.py
index fe93dbc9b033..049caa72ac44 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/operations/__init__.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/operations/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._appliances_operations import AppliancesOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._appliances_operations import AppliancesOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"AppliancesOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/operations/_appliances_operations.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/operations/_appliances_operations.py
index 99b017339467..230a9ffe0582 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/operations/_appliances_operations.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/aio/operations/_appliances_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._appliances_operations import (
build_create_or_update_request,
build_delete_request,
@@ -45,6 +45,10 @@
build_update_request,
)
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -72,7 +76,6 @@ def __init__(self, *args, **kwargs) -> None:
def list_operations(self, **kwargs: Any) -> AsyncIterable["_models.ApplianceOperation"]:
"""Lists all available Appliances operations.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplianceOperation or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resourceconnector.models.ApplianceOperation]
@@ -84,7 +87,7 @@ def list_operations(self, **kwargs: Any) -> AsyncIterable["_models.ApplianceOper
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceOperationsList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -95,14 +98,12 @@ def list_operations(self, **kwargs: Any) -> AsyncIterable["_models.ApplianceOper
def prepare_request(next_link=None):
if not next_link:
- request = build_list_operations_request(
+ _request = build_list_operations_request(
api_version=api_version,
- template_url=self.list_operations.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -114,13 +115,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ApplianceOperationsList", pipeline_response)
@@ -130,11 +130,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -147,8 +147,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_operations.metadata = {"url": "/providers/Microsoft.ResourceConnector/operations"}
-
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Appliance"]:
"""Gets a list of Appliances in a subscription.
@@ -156,7 +154,6 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Applianc
Gets a list of Appliances in the specified subscription. The operation returns properties of
each Appliance.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Appliance or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resourceconnector.models.Appliance]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -167,7 +164,7 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Applianc
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -178,15 +175,13 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Applianc
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -198,13 +193,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ApplianceListResult", pipeline_response)
@@ -214,11 +208,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -231,22 +225,17 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_subscription.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.ResourceConnector/appliances"
- }
-
@distributed_trace_async
async def get_telemetry_config(self, **kwargs: Any) -> _models.ApplianceGetTelemetryConfigResult:
"""Gets the telemetry config.
Gets the telemetry config.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplianceGetTelemetryConfigResult or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.ApplianceGetTelemetryConfigResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -260,19 +249,17 @@ async def get_telemetry_config(self, **kwargs: Any) -> _models.ApplianceGetTelem
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceGetTelemetryConfigResult] = kwargs.pop("cls", None)
- request = build_get_telemetry_config_request(
+ _request = build_get_telemetry_config_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get_telemetry_config.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -282,16 +269,12 @@ async def get_telemetry_config(self, **kwargs: Any) -> _models.ApplianceGetTelem
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ApplianceGetTelemetryConfigResult", pipeline_response)
+ deserialized = self._deserialize("ApplianceGetTelemetryConfigResult", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get_telemetry_config.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.ResourceConnector/telemetryconfig"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Appliance"]:
@@ -303,7 +286,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Appliance or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resourceconnector.models.Appliance]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -314,7 +296,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -325,16 +307,14 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -346,13 +326,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ApplianceListResult", pipeline_response)
@@ -362,11 +341,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -379,10 +358,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances"
- }
-
@distributed_trace_async
async def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _models.Appliance:
"""Gets an Appliance.
@@ -394,12 +369,11 @@ async def get(self, resource_group_name: str, resource_name: str, **kwargs: Any)
:type resource_group_name: str
:param resource_name: Appliances name. Required.
:type resource_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Appliance or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.Appliance
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -413,21 +387,19 @@ async def get(self, resource_group_name: str, resource_name: str, **kwargs: Any)
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.Appliance] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -437,21 +409,21 @@ async def get(self, resource_group_name: str, resource_name: str, **kwargs: Any)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Appliance", pipeline_response)
+ deserialized = self._deserialize("Appliance", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return deserialized # type: ignore
async def _create_or_update_initial(
- self, resource_group_name: str, resource_name: str, parameters: Union[_models.Appliance, IO], **kwargs: Any
- ) -> _models.Appliance:
- error_map = {
+ self,
+ resource_group_name: str,
+ resource_name: str,
+ parameters: Union[_models.Appliance, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -464,7 +436,7 @@ async def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.Appliance] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -474,7 +446,7 @@ async def _create_or_update_initial(
else:
_json = self._serialize.body(parameters, "Appliance")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
@@ -482,40 +454,35 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("Appliance", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("Appliance", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
-
@overload
async def begin_create_or_update(
self,
@@ -540,14 +507,6 @@ async def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Appliance or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.resourceconnector.models.Appliance]
@@ -559,7 +518,7 @@ async def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -574,18 +533,10 @@ async def begin_create_or_update(
:param resource_name: Appliances name. Required.
:type resource_name: str
:param parameters: Parameters supplied to create or update an Appliance. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Appliance or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.resourceconnector.models.Appliance]
@@ -594,7 +545,11 @@ async def begin_create_or_update(
@distributed_trace_async
async def begin_create_or_update(
- self, resource_group_name: str, resource_name: str, parameters: Union[_models.Appliance, IO], **kwargs: Any
+ self,
+ resource_group_name: str,
+ resource_name: str,
+ parameters: Union[_models.Appliance, IO[bytes]],
+ **kwargs: Any
) -> AsyncLROPoller[_models.Appliance]:
"""Creates or updates an Appliance.
@@ -606,19 +561,8 @@ async def begin_create_or_update(
:param resource_name: Appliances name. Required.
:type resource_name: str
:param parameters: Parameters supplied to create or update an Appliance. Is either a Appliance
- type or a IO type. Required.
- :type parameters: ~azure.mgmt.resourceconnector.models.Appliance or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.resourceconnector.models.Appliance or IO[bytes]
:return: An instance of AsyncLROPoller that returns either Appliance or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.resourceconnector.models.Appliance]
@@ -645,12 +589,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Appliance", pipeline_response)
+ deserialized = self._deserialize("Appliance", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -663,22 +608,20 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.Appliance].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return AsyncLROPoller[_models.Appliance](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, resource_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -690,38 +633,41 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
@@ -734,14 +680,6 @@ async def begin_delete(self, resource_group_name: str, resource_name: str, **kwa
:type resource_group_name: str
:param resource_name: Appliances name. Required.
:type resource_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -755,7 +693,7 @@ async def begin_delete(self, resource_group_name: str, resource_name: str, **kwa
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
api_version=api_version,
@@ -764,11 +702,12 @@ async def begin_delete(self, resource_group_name: str, resource_name: str, **kwa
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(
@@ -780,17 +719,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace_async
async def update(
@@ -808,12 +743,11 @@ async def update(
:type resource_name: str
:param tags: Resource tags. Default value is None.
:type tags: dict[str, str]
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Appliance or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.Appliance
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -831,23 +765,21 @@ async def update(
_parameters = _models.PatchableAppliance(tags=tags)
_json = self._serialize.body(_parameters, "PatchableAppliance")
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -857,16 +789,12 @@ async def update(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Appliance", pipeline_response)
+ deserialized = self._deserialize("Appliance", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def list_cluster_user_credential(
@@ -881,12 +809,11 @@ async def list_cluster_user_credential(
:type resource_group_name: str
:param resource_name: Appliances name. Required.
:type resource_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplianceListCredentialResults or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.ApplianceListCredentialResults
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -900,21 +827,19 @@ async def list_cluster_user_credential(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceListCredentialResults] = kwargs.pop("cls", None)
- request = build_list_cluster_user_credential_request(
+ _request = build_list_cluster_user_credential_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_cluster_user_credential.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -924,16 +849,12 @@ async def list_cluster_user_credential(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ApplianceListCredentialResults", pipeline_response)
+ deserialized = self._deserialize("ApplianceListCredentialResults", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- list_cluster_user_credential.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}/listClusterUserCredential"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def list_keys(
@@ -951,12 +872,11 @@ async def list_keys(
:param artifact_type: This sets the type of artifact being returned, when empty no artifact
endpoint is returned. Default value is None.
:type artifact_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplianceListKeysResults or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.ApplianceListKeysResults
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -970,22 +890,20 @@ async def list_keys(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceListKeysResults] = kwargs.pop("cls", None)
- request = build_list_keys_request(
+ _request = build_list_keys_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
artifact_type=artifact_type,
api_version=api_version,
- template_url=self.list_keys.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -995,16 +913,12 @@ async def list_keys(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ApplianceListKeysResults", pipeline_response)
+ deserialized = self._deserialize("ApplianceListKeysResults", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- list_keys.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}/listkeys"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def get_upgrade_graph(
@@ -1022,12 +936,11 @@ async def get_upgrade_graph(
:type resource_name: str
:param upgrade_graph: Upgrade graph version, ex - stable. Required.
:type upgrade_graph: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: UpgradeGraph or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.UpgradeGraph
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1041,22 +954,20 @@ async def get_upgrade_graph(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.UpgradeGraph] = kwargs.pop("cls", None)
- request = build_get_upgrade_graph_request(
+ _request = build_get_upgrade_graph_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
upgrade_graph=upgrade_graph,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get_upgrade_graph.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1066,13 +977,9 @@ async def get_upgrade_graph(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("UpgradeGraph", pipeline_response)
+ deserialized = self._deserialize("UpgradeGraph", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get_upgrade_graph.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}/upgradeGraphs/{upgradeGraph}"
- }
+ return deserialized # type: ignore
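
The async operations above only change internals (request construction, removal of the `metadata` URL attributes, and streaming of initial LRO responses); the public call pattern is unchanged. A minimal sketch of exercising `list_keys` and `get_upgrade_graph` after the regeneration, assuming the usual `aio` client path and placeholder subscription and resource names (not part of the diff):

```python
# Minimal sketch: the internal refactor does not change the public surface.
# Subscription ID, resource group, and appliance name below are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.resourceconnector.aio import ResourceConnectorMgmtClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with ResourceConnectorMgmtClient(credential, "<subscription-id>") as client:
            # Same signatures as before the regeneration.
            keys = await client.appliances.list_keys(
                resource_group_name="<resource-group>",
                resource_name="<appliance-name>",
            )
            graph = await client.appliances.get_upgrade_graph(
                resource_group_name="<resource-group>",
                resource_name="<appliance-name>",
                upgrade_graph="stable",
            )
            print(keys, graph)


asyncio.run(main())
```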
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/models/__init__.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/models/__init__.py
index 5151e791dfb1..33f263df0698 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/models/__init__.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/models/__init__.py
@@ -5,44 +5,55 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._models_py3 import Appliance
-from ._models_py3 import ApplianceCredentialKubeconfig
-from ._models_py3 import ApplianceGetTelemetryConfigResult
-from ._models_py3 import ApplianceListCredentialResults
-from ._models_py3 import ApplianceListKeysResults
-from ._models_py3 import ApplianceListResult
-from ._models_py3 import ApplianceOperation
-from ._models_py3 import ApplianceOperationsList
-from ._models_py3 import AppliancePropertiesInfrastructureConfig
-from ._models_py3 import ArtifactProfile
-from ._models_py3 import ErrorAdditionalInfo
-from ._models_py3 import ErrorDetail
-from ._models_py3 import ErrorResponse
-from ._models_py3 import HybridConnectionConfig
-from ._models_py3 import Identity
-from ._models_py3 import PatchableAppliance
-from ._models_py3 import Resource
-from ._models_py3 import SSHKey
-from ._models_py3 import SupportedVersion
-from ._models_py3 import SupportedVersionCatalogVersion
-from ._models_py3 import SupportedVersionCatalogVersionData
-from ._models_py3 import SupportedVersionMetadata
-from ._models_py3 import SystemData
-from ._models_py3 import TrackedResource
-from ._models_py3 import UpgradeGraph
-from ._models_py3 import UpgradeGraphProperties
+from typing import TYPE_CHECKING
-from ._resource_connector_mgmt_client_enums import AccessProfileType
-from ._resource_connector_mgmt_client_enums import ArtifactType
-from ._resource_connector_mgmt_client_enums import CreatedByType
-from ._resource_connector_mgmt_client_enums import Distro
-from ._resource_connector_mgmt_client_enums import Provider
-from ._resource_connector_mgmt_client_enums import ResourceIdentityType
-from ._resource_connector_mgmt_client_enums import SSHKeyType
-from ._resource_connector_mgmt_client_enums import Status
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models_py3 import ( # type: ignore
+ Appliance,
+ ApplianceCredentialKubeconfig,
+ ApplianceGetTelemetryConfigResult,
+ ApplianceListCredentialResults,
+ ApplianceListKeysResults,
+ ApplianceListResult,
+ ApplianceOperation,
+ ApplianceOperationsList,
+ AppliancePropertiesInfrastructureConfig,
+ ArtifactProfile,
+ ErrorAdditionalInfo,
+ ErrorDetail,
+ ErrorResponse,
+ HybridConnectionConfig,
+ Identity,
+ PatchableAppliance,
+ Resource,
+ SSHKey,
+ SupportedVersion,
+ SupportedVersionCatalogVersion,
+ SupportedVersionCatalogVersionData,
+ SupportedVersionMetadata,
+ SystemData,
+ TrackedResource,
+ UpgradeGraph,
+ UpgradeGraphProperties,
+)
+
+from ._resource_connector_mgmt_client_enums import ( # type: ignore
+ AccessProfileType,
+ ArtifactType,
+ CreatedByType,
+ Distro,
+ Provider,
+ ResourceIdentityType,
+ SSHKeyType,
+ Status,
+)
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -81,5 +92,5 @@
"SSHKeyType",
"Status",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
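
The reorganized `models/__init__.py` keeps the same public exports, so existing imports continue to resolve. An illustrative snippet using names listed in the diff above:

```python
# Illustrative only: the grouped imports and TYPE_CHECKING guard above do not
# change what the package exports.
from azure.mgmt.resourceconnector.models import (
    Appliance,
    ApplianceListKeysResults,
    Distro,
    Provider,
    ResourceIdentityType,
    UpgradeGraph,
)

print(Appliance, ApplianceListKeysResults, Distro, Provider, ResourceIdentityType, UpgradeGraph)
```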
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/models/_models_py3.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/models/_models_py3.py
index 37f2de840208..a9a619429512 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/models/_models_py3.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/models/_models_py3.py
@@ -1,5 +1,4 @@
# coding=utf-8
-# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
@@ -13,7 +12,6 @@
from .. import _serialization
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
@@ -23,7 +21,7 @@ class Resource(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -64,10 +62,10 @@ class TrackedResource(Resource):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -112,15 +110,15 @@ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kw
self.location = location
-class Appliance(TrackedResource): # pylint: disable=too-many-instance-attributes
+class Appliance(TrackedResource):
"""Appliances definition.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -429,7 +427,7 @@ def __init__(self, **kwargs: Any) -> None:
class ApplianceOperationsList(_serialization.Model):
"""Lists of Appliances operations.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar next_link: Next page of operations.
:vartype next_link: str
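
The docstring edits in `_models_py3.py` are cosmetic; model constructors keep the keyword-only signature shown in the `TrackedResource` hunk (required `location`, optional `tags`). A minimal sketch with a placeholder region:

```python
# Sketch based on the constructor shown above: TrackedResource-derived models
# such as Appliance are keyword-only, with location required and tags optional.
from azure.mgmt.resourceconnector.models import Appliance

appliance = Appliance(
    location="eastus",          # placeholder region
    tags={"env": "test"},
    # Other keyword-only fields documented on the model (infrastructure config,
    # distro, identity, ...) follow the same pattern; see the model docstrings.
)
print(appliance.location, appliance.tags)
```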
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/operations/__init__.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/operations/__init__.py
index fe93dbc9b033..049caa72ac44 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/operations/__init__.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/operations/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._appliances_operations import AppliancesOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._appliances_operations import AppliancesOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"AppliancesOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/operations/_appliances_operations.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/operations/_appliances_operations.py
index 272c6f26599a..8be15f8e5a28 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/operations/_appliances_operations.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/azure/mgmt/resourceconnector/operations/_appliances_operations.py
@@ -7,7 +7,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,8 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -304,7 +309,7 @@ def build_update_request(
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
-def build_list_cluster_user_credential_request(
+def build_list_cluster_user_credential_request( # pylint: disable=name-too-long
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
@@ -455,7 +460,6 @@ def __init__(self, *args, **kwargs):
def list_operations(self, **kwargs: Any) -> Iterable["_models.ApplianceOperation"]:
"""Lists all available Appliances operations.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplianceOperation or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resourceconnector.models.ApplianceOperation]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -466,7 +470,7 @@ def list_operations(self, **kwargs: Any) -> Iterable["_models.ApplianceOperation
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceOperationsList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -477,14 +481,12 @@ def list_operations(self, **kwargs: Any) -> Iterable["_models.ApplianceOperation
def prepare_request(next_link=None):
if not next_link:
- request = build_list_operations_request(
+ _request = build_list_operations_request(
api_version=api_version,
- template_url=self.list_operations.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -496,13 +498,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("ApplianceOperationsList", pipeline_response)
@@ -512,11 +513,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -529,8 +530,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_operations.metadata = {"url": "/providers/Microsoft.ResourceConnector/operations"}
-
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Appliance"]:
"""Gets a list of Appliances in a subscription.
@@ -538,7 +537,6 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Appliance"]:
Gets a list of Appliances in the specified subscription. The operation returns properties of
each Appliance.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Appliance or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resourceconnector.models.Appliance]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -549,7 +547,7 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Appliance"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -560,15 +558,13 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Appliance"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -580,13 +576,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("ApplianceListResult", pipeline_response)
@@ -596,11 +591,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -613,22 +608,17 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_subscription.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.ResourceConnector/appliances"
- }
-
@distributed_trace
def get_telemetry_config(self, **kwargs: Any) -> _models.ApplianceGetTelemetryConfigResult:
"""Gets the telemetry config.
Gets the telemetry config.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplianceGetTelemetryConfigResult or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.ApplianceGetTelemetryConfigResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -642,19 +632,17 @@ def get_telemetry_config(self, **kwargs: Any) -> _models.ApplianceGetTelemetryCo
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceGetTelemetryConfigResult] = kwargs.pop("cls", None)
- request = build_get_telemetry_config_request(
+ _request = build_get_telemetry_config_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get_telemetry_config.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -664,16 +652,12 @@ def get_telemetry_config(self, **kwargs: Any) -> _models.ApplianceGetTelemetryCo
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ApplianceGetTelemetryConfigResult", pipeline_response)
+ deserialized = self._deserialize("ApplianceGetTelemetryConfigResult", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get_telemetry_config.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.ResourceConnector/telemetryconfig"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Appliance"]:
@@ -685,7 +669,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Appliance or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resourceconnector.models.Appliance]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -696,7 +679,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -707,16 +690,14 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -728,13 +709,12 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("ApplianceListResult", pipeline_response)
@@ -744,11 +724,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -761,10 +741,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances"
- }
-
@distributed_trace
def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _models.Appliance:
"""Gets an Appliance.
@@ -776,12 +752,11 @@ def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _m
:type resource_group_name: str
:param resource_name: Appliances name. Required.
:type resource_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Appliance or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.Appliance
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -795,21 +770,19 @@ def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _m
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.Appliance] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -819,21 +792,21 @@ def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _m
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Appliance", pipeline_response)
+ deserialized = self._deserialize("Appliance", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return deserialized # type: ignore
def _create_or_update_initial(
- self, resource_group_name: str, resource_name: str, parameters: Union[_models.Appliance, IO], **kwargs: Any
- ) -> _models.Appliance:
- error_map = {
+ self,
+ resource_group_name: str,
+ resource_name: str,
+ parameters: Union[_models.Appliance, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -846,7 +819,7 @@ def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.Appliance] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -856,7 +829,7 @@ def _create_or_update_initial(
else:
_json = self._serialize.body(parameters, "Appliance")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
@@ -864,40 +837,35 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("Appliance", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("Appliance", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
-
@overload
def begin_create_or_update(
self,
@@ -922,14 +890,6 @@ def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either Appliance or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.resourceconnector.models.Appliance]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -940,7 +900,7 @@ def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -955,18 +915,10 @@ def begin_create_or_update(
:param resource_name: Appliances name. Required.
:type resource_name: str
:param parameters: Parameters supplied to create or update an Appliance. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either Appliance or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.resourceconnector.models.Appliance]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -974,7 +926,11 @@ def begin_create_or_update(
@distributed_trace
def begin_create_or_update(
- self, resource_group_name: str, resource_name: str, parameters: Union[_models.Appliance, IO], **kwargs: Any
+ self,
+ resource_group_name: str,
+ resource_name: str,
+ parameters: Union[_models.Appliance, IO[bytes]],
+ **kwargs: Any
) -> LROPoller[_models.Appliance]:
"""Creates or updates an Appliance.
@@ -986,19 +942,8 @@ def begin_create_or_update(
:param resource_name: Appliances name. Required.
:type resource_name: str
:param parameters: Parameters supplied to create or update an Appliance. Is either a Appliance
- type or a IO type. Required.
- :type parameters: ~azure.mgmt.resourceconnector.models.Appliance or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.resourceconnector.models.Appliance or IO[bytes]
:return: An instance of LROPoller that returns either Appliance or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.resourceconnector.models.Appliance]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1024,12 +969,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Appliance", pipeline_response)
+ deserialized = self._deserialize("Appliance", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -1041,22 +987,18 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.Appliance].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return LROPoller[_models.Appliance](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _delete_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, resource_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _delete_initial(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1068,38 +1010,41 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -1112,14 +1057,6 @@ def begin_delete(self, resource_group_name: str, resource_name: str, **kwargs: A
:type resource_group_name: str
:param resource_name: Appliances name. Required.
:type resource_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1133,7 +1070,7 @@ def begin_delete(self, resource_group_name: str, resource_name: str, **kwargs: A
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
api_version=api_version,
@@ -1142,11 +1079,12 @@ def begin_delete(self, resource_group_name: str, resource_name: str, **kwargs: A
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(
@@ -1157,17 +1095,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def update(
@@ -1185,12 +1119,11 @@ def update(
:type resource_name: str
:param tags: Resource tags. Default value is None.
:type tags: dict[str, str]
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Appliance or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.Appliance
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1208,23 +1141,21 @@ def update(
_parameters = _models.PatchableAppliance(tags=tags)
_json = self._serialize.body(_parameters, "PatchableAppliance")
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
- template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1234,16 +1165,12 @@ def update(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Appliance", pipeline_response)
+ deserialized = self._deserialize("Appliance", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_cluster_user_credential(
@@ -1258,12 +1185,11 @@ def list_cluster_user_credential(
:type resource_group_name: str
:param resource_name: Appliances name. Required.
:type resource_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplianceListCredentialResults or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.ApplianceListCredentialResults
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1277,21 +1203,19 @@ def list_cluster_user_credential(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceListCredentialResults] = kwargs.pop("cls", None)
- request = build_list_cluster_user_credential_request(
+ _request = build_list_cluster_user_credential_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_cluster_user_credential.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1301,16 +1225,12 @@ def list_cluster_user_credential(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ApplianceListCredentialResults", pipeline_response)
+ deserialized = self._deserialize("ApplianceListCredentialResults", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- list_cluster_user_credential.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}/listClusterUserCredential"
- }
+ return deserialized # type: ignore
@distributed_trace
def list_keys(
@@ -1328,12 +1248,11 @@ def list_keys(
:param artifact_type: This sets the type of artifact being returned, when empty no artifact
endpoint is returned. Default value is None.
:type artifact_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplianceListKeysResults or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.ApplianceListKeysResults
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1347,22 +1266,20 @@ def list_keys(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApplianceListKeysResults] = kwargs.pop("cls", None)
- request = build_list_keys_request(
+ _request = build_list_keys_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
artifact_type=artifact_type,
api_version=api_version,
- template_url=self.list_keys.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1372,16 +1289,12 @@ def list_keys(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ApplianceListKeysResults", pipeline_response)
+ deserialized = self._deserialize("ApplianceListKeysResults", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- list_keys.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}/listkeys"
- }
+ return deserialized # type: ignore
@distributed_trace
def get_upgrade_graph(
@@ -1399,12 +1312,11 @@ def get_upgrade_graph(
:type resource_name: str
:param upgrade_graph: Upgrade graph version, ex - stable. Required.
:type upgrade_graph: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: UpgradeGraph or the result of cls(response)
:rtype: ~azure.mgmt.resourceconnector.models.UpgradeGraph
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1418,22 +1330,20 @@ def get_upgrade_graph(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.UpgradeGraph] = kwargs.pop("cls", None)
- request = build_get_upgrade_graph_request(
+ _request = build_get_upgrade_graph_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
upgrade_graph=upgrade_graph,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get_upgrade_graph.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1443,13 +1353,9 @@ def get_upgrade_graph(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("UpgradeGraph", pipeline_response)
+ deserialized = self._deserialize("UpgradeGraph", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get_upgrade_graph.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ResourceConnector/appliances/{resourceName}/upgradeGraphs/{upgradeGraph}"
- }
+ return deserialized # type: ignore
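
For reference, a minimal sketch of calling the reworked `begin_create_or_update` through the `IO[bytes]` overload shown above; the subscription, resource names, and JSON body are placeholders:

```python
# Sketch of the IO[bytes] overload: the body is passed as a binary stream with
# content_type "application/json". All identifiers below are placeholders.
import io
import json

from azure.identity import DefaultAzureCredential
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient

client = ResourceConnectorMgmtClient(DefaultAzureCredential(), "<subscription-id>")

body = io.BytesIO(json.dumps({"location": "eastus"}).encode("utf-8"))
poller = client.appliances.begin_create_or_update(
    resource_group_name="<resource-group>",
    resource_name="<appliance-name>",
    parameters=body,                     # IO[bytes] overload
    content_type="application/json",
)
appliance = poller.result()  # LROPoller[Appliance]
print(appliance.name)
```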
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_create_update.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_create_update.py
index 187f30f3ba86..7c8d6286e6b6 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_create_update.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_create_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_delete.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_delete.py
index 5432904c5807..8f80d1e90a4c 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_delete.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_get.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_get.py
index 16f19d3064e3..254d40aff0b9 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_get.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_by_resource_group.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_by_resource_group.py
index ab04e9dec1bd..d3cad625d1bd 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_by_resource_group.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_by_resource_group.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_by_subscription.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_by_subscription.py
index 6f3f086c9eca..8718f8cd7617 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_by_subscription.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_by_subscription.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_cluster_user_credential.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_cluster_user_credential.py
index 251cdefc1f66..ebe0599f2d71 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_cluster_user_credential.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_cluster_user_credential.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_keys.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_keys.py
index 344be9ec4164..e2ecb80bbbe0 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_keys.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_keys.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_operations.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_operations.py
index 1e0598efcccb..11cff1817132 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_operations.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_list_operations.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_patch.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_patch.py
index c10ef463cd72..d8e687ea7a8f 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_patch.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/appliances_patch.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/telemetry_config.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/telemetry_config.py
index 482cf32c3920..774fb8ccc750 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/telemetry_config.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/telemetry_config.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/upgrade_graph.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/upgrade_graph.py
index 3b78480e8aae..ddfe588ef156 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/upgrade_graph.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_samples/upgrade_graph.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
"""
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/conftest.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/conftest.py
new file mode 100644
index 000000000000..716dd7c4e6f6
--- /dev/null
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/conftest.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+ test_proxy,
+ add_general_regex_sanitizer,
+ add_body_key_sanitizer,
+ add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, please avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+ resourceconnectormgmt_subscription_id = os.environ.get(
+ "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+ )
+ resourceconnectormgmt_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+ resourceconnectormgmt_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+ resourceconnectormgmt_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(
+ regex=resourceconnectormgmt_subscription_id, value="00000000-0000-0000-0000-000000000000"
+ )
+ add_general_regex_sanitizer(regex=resourceconnectormgmt_tenant_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=resourceconnectormgmt_client_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=resourceconnectormgmt_client_secret, value="00000000-0000-0000-0000-000000000000")
+
+ add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+ add_header_regex_sanitizer(key="Cookie", value="cookie;")
+ add_body_key_sanitizer(json_path="$..access_token", value="access_token")
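The fixture above swaps the real subscription, tenant, client ID, and secret for zero GUIDs before recordings are persisted. For context, here is a minimal sketch (not generated code) of how the same environment variables are typically consumed when the client is exercised directly; the fallback GUID mirrors the sanitizer default and is not a usable subscription.

```python
# Minimal sketch: build the management client from the same environment
# variables the sanitizer fixture expects.
import os

from azure.identity import DefaultAzureCredential
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient

# DefaultAzureCredential reads AZURE_CLIENT_ID, AZURE_TENANT_ID and
# AZURE_CLIENT_SECRET from the environment when they are present.
client = ResourceConnectorMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id=os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"),
)

# Pageable operations, such as listing appliances in the subscription,
# are consumed by simple iteration.
for appliance in client.appliances.list_by_subscription():
    print(appliance.name)
```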
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/test_resource_connector_mgmt_appliances_operations.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/test_resource_connector_mgmt_appliances_operations.py
new file mode 100644
index 000000000000..650768b03512
--- /dev/null
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/test_resource_connector_mgmt_appliances_operations.py
@@ -0,0 +1,167 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestResourceConnectorMgmtAppliancesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(ResourceConnectorMgmtClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_list_operations(self, resource_group):
+ response = self.client.appliances.list_operations(
+ api_version="2022-10-27",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_list_by_subscription(self, resource_group):
+ response = self.client.appliances.list_by_subscription(
+ api_version="2022-10-27",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_get_telemetry_config(self, resource_group):
+ response = self.client.appliances.get_telemetry_config(
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_list_by_resource_group(self, resource_group):
+ response = self.client.appliances.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2022-10-27",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_get(self, resource_group):
+ response = self.client.appliances.get(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_begin_create_or_update(self, resource_group):
+ response = self.client.appliances.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ parameters={
+ "location": "str",
+ "distro": "AKSEdge",
+ "id": "str",
+ "identity": {"principalId": "str", "tenantId": "str", "type": "str"},
+ "infrastructureConfig": {"provider": "str"},
+ "name": "str",
+ "provisioningState": "str",
+ "publicKey": "str",
+ "status": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "version": "str",
+ },
+ api_version="2022-10-27",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_begin_delete(self, resource_group):
+ response = self.client.appliances.begin_delete(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_update(self, resource_group):
+ response = self.client.appliances.update(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_list_cluster_user_credential(self, resource_group):
+ response = self.client.appliances.list_cluster_user_credential(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_list_keys(self, resource_group):
+ response = self.client.appliances.list_keys(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_appliances_get_upgrade_graph(self, resource_group):
+ response = self.client.appliances.get_upgrade_graph(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ upgrade_graph="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
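Outside the recorded test harness, the `begin_create_or_update` call exercised above takes the same keyword arguments with concrete values. A hedged sketch follows; the resource group, appliance name, and provider are illustrative placeholders, and the payload is trimmed to the fields a caller would normally supply rather than the full round-trip shape used by the test.

```python
# Illustrative only: a synchronous create-or-update mirroring the generated
# test, with placeholder names instead of the "str" stand-ins.
from azure.identity import DefaultAzureCredential
from azure.mgmt.resourceconnector import ResourceConnectorMgmtClient

client = ResourceConnectorMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
)

poller = client.appliances.begin_create_or_update(
    resource_group_name="example-rg",         # placeholder
    resource_name="example-appliance",        # placeholder
    parameters={
        "location": "eastus",
        "distro": "AKSEdge",
        # Provider value is an assumption; use the one matching your fabric.
        "infrastructureConfig": {"provider": "VMWare"},
    },
)
appliance = poller.result()  # blocks until the long-running operation finishes
print(appliance.name)
```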
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/test_resource_connector_mgmt_appliances_operations_async.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/test_resource_connector_mgmt_appliances_operations_async.py
new file mode 100644
index 000000000000..c0167dfcd0f3
--- /dev/null
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/generated_tests/test_resource_connector_mgmt_appliances_operations_async.py
@@ -0,0 +1,172 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.resourceconnector.aio import ResourceConnectorMgmtClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestResourceConnectorMgmtAppliancesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(ResourceConnectorMgmtClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_list_operations(self, resource_group):
+ response = self.client.appliances.list_operations(
+ api_version="2022-10-27",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_list_by_subscription(self, resource_group):
+ response = self.client.appliances.list_by_subscription(
+ api_version="2022-10-27",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_get_telemetry_config(self, resource_group):
+ response = await self.client.appliances.get_telemetry_config(
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_list_by_resource_group(self, resource_group):
+ response = self.client.appliances.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2022-10-27",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_get(self, resource_group):
+ response = await self.client.appliances.get(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.appliances.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ parameters={
+ "location": "str",
+ "distro": "AKSEdge",
+ "id": "str",
+ "identity": {"principalId": "str", "tenantId": "str", "type": "str"},
+ "infrastructureConfig": {"provider": "str"},
+ "name": "str",
+ "provisioningState": "str",
+ "publicKey": "str",
+ "status": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "version": "str",
+ },
+ api_version="2022-10-27",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_begin_delete(self, resource_group):
+ response = await (
+ await self.client.appliances.begin_delete(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_update(self, resource_group):
+ response = await self.client.appliances.update(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_list_cluster_user_credential(self, resource_group):
+ response = await self.client.appliances.list_cluster_user_credential(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_list_keys(self, resource_group):
+ response = await self.client.appliances.list_keys(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_appliances_get_upgrade_graph(self, resource_group):
+ response = await self.client.appliances.get_upgrade_graph(
+ resource_group_name=resource_group.name,
+ resource_name="str",
+ upgrade_graph="str",
+ api_version="2022-10-27",
+ )
+
+ # please add some check logic here by yourself
+ # ...
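The async client mirrors the synchronous surface, but pagers are consumed with `async for` and long-running operations need two awaits: one for the `begin_*` call and one for the poller's `result()`, which is the nested pattern used in the tests above. A hedged standalone sketch follows (placeholder names; the credential and client are closed explicitly since no context manager is used).

```python
# Illustrative only: async usage mirroring the generated async tests.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.resourceconnector.aio import ResourceConnectorMgmtClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = ResourceConnectorMgmtClient(
        credential=credential,
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )
    try:
        # Async pagers are iterated with "async for".
        async for appliance in client.appliances.list_by_subscription():
            print(appliance.name)

        # Two awaits: the begin_* call returns an awaitable poller, and
        # result() is awaited to get the final outcome.
        poller = await client.appliances.begin_delete(
            resource_group_name="example-rg",       # placeholder
            resource_name="example-appliance",      # placeholder
        )
        await poller.result()
    finally:
        await client.close()
        await credential.close()


asyncio.run(main())
```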
diff --git a/sdk/resourceconnector/azure-mgmt-resourceconnector/setup.py b/sdk/resourceconnector/azure-mgmt-resourceconnector/setup.py
index 62bce4722d62..9f0c3659d4c1 100644
--- a/sdk/resourceconnector/azure-mgmt-resourceconnector/setup.py
+++ b/sdk/resourceconnector/azure-mgmt-resourceconnector/setup.py
@@ -53,11 +53,11 @@
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"License :: OSI Approved :: MIT License",
],
zip_safe=False,
@@ -74,10 +74,10 @@
"pytyped": ["py.typed"],
},
install_requires=[
- "isodate<1.0.0,>=0.6.1",
- "azure-common~=1.1",
- "azure-mgmt-core>=1.3.2,<2.0.0",
- "typing-extensions>=4.3.0; python_version<'3.8.0'",
+ "isodate>=0.6.1",
+ "typing-extensions>=4.6.0",
+ "azure-common>=1.1",
+ "azure-mgmt-core>=1.3.2",
],
- python_requires=">=3.7",
+ python_requires=">=3.8",
)