diff --git a/sdk/storage/azure-mgmt-storagesync/README.md b/sdk/storage/azure-mgmt-storagesync/README.md
index 66297aec5f11..3a214611e04c 100644
--- a/sdk/storage/azure-mgmt-storagesync/README.md
+++ b/sdk/storage/azure-mgmt-storagesync/README.md
@@ -1,7 +1,7 @@
# Microsoft Azure SDK for Python
This is the Microsoft Azure Storage Sync Client Library.
-This package has been tested with Python 3.7+.
+This package has been tested with Python 3.8+.
For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all).
## _Disclaimer_
@@ -12,7 +12,7 @@ _Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For
### Prerequisites
-- Python 3.7+ is required to use this package.
+- Python 3.8+ is required to use this package.
- [Azure subscription](https://azure.microsoft.com/free/)
### Install the package
@@ -59,6 +59,3 @@ Code samples for this package can be found at:
If you encounter any bugs or have suggestions, please file an issue in the
[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
section of the project.
-
-
-
diff --git a/sdk/storage/azure-mgmt-storagesync/_meta.json b/sdk/storage/azure-mgmt-storagesync/_meta.json
index 02ce6effd332..2a2509d61921 100644
--- a/sdk/storage/azure-mgmt-storagesync/_meta.json
+++ b/sdk/storage/azure-mgmt-storagesync/_meta.json
@@ -1,11 +1,11 @@
{
- "commit": "5fc05d0f0b15cbf16de942cadce464b495c66a58",
+ "commit": "a9a39b7d3805fee40151671f13c7682e76828d6b",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
- "autorest": "3.9.2",
+ "autorest": "3.10.2",
"use": [
- "@autorest/python@6.2.1",
- "@autorest/modelerfour@4.24.3"
+ "@autorest/python@6.26.4",
+ "@autorest/modelerfour@4.27.0"
],
- "autorest_command": "autorest specification/storagesync/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.2.1 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False",
+ "autorest_command": "autorest specification/storagesync/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.26.4 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
"readme": "specification/storagesync/resource-manager/readme.md"
}
\ No newline at end of file
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/__init__.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/__init__.py
index 2b933670b59c..f65cdedf589d 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/__init__.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._microsoft_storage_sync import MicrosoftStorageSync
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._microsoft_storage_sync import MicrosoftStorageSync # type: ignore
from ._version import VERSION
__version__ = VERSION
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -21,6 +27,6 @@
__all__ = [
"MicrosoftStorageSync",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_configuration.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_configuration.py
index b53b5bcde622..4c0e0f3ebdd0 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_configuration.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_configuration.py
@@ -6,26 +6,18 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-import sys
from typing import Any, TYPE_CHECKING
-from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
-else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
-
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class MicrosoftStorageSyncConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+class MicrosoftStorageSyncConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for MicrosoftStorageSync.
Note that all parameters used to create this instance are saved as instance
@@ -33,16 +25,15 @@ class MicrosoftStorageSyncConfiguration(Configuration): # pylint: disable=too-m
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
- :param subscription_id: The ID of the target subscription. Required.
+ :param subscription_id: The ID of the target subscription. The value must be an UUID. Required.
:type subscription_id: str
- :keyword api_version: Api Version. Default value is "2022-06-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2022-09-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
- super(MicrosoftStorageSyncConfiguration, self).__init__(**kwargs)
- api_version = kwargs.pop("api_version", "2022-06-01") # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", "2022-09-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
@@ -54,20 +45,18 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs
self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-storagesync/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
self._configure(**kwargs)
- def _configure(
- self, **kwargs # type: Any
- ):
- # type: (...) -> None
+ def _configure(self, **kwargs: Any) -> None:
self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = ARMChallengeAuthenticationPolicy(
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_microsoft_storage_sync.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_microsoft_storage_sync.py
index 42cf94f23990..6295dfbb3736 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_microsoft_storage_sync.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_microsoft_storage_sync.py
@@ -8,11 +8,14 @@
from copy import deepcopy
from typing import Any, TYPE_CHECKING
+from typing_extensions import Self
+from azure.core.pipeline import policies
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
+from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy
-from . import models
+from . import models as _models
from ._configuration import MicrosoftStorageSyncConfiguration
from ._serialization import Deserializer, Serializer
from .operations import (
@@ -30,14 +33,11 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class MicrosoftStorageSync(
- MicrosoftStorageSyncOperationsMixin
-): # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
- """Microsoft Storage Sync Service API.
+class MicrosoftStorageSync(MicrosoftStorageSyncOperationsMixin): # pylint: disable=too-many-instance-attributes
+ """Microsoft Storage Sync Service API. This belongs to Microsoft.StorageSync Resource Provider.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.storagesync.operations.Operations
@@ -63,11 +63,11 @@ class MicrosoftStorageSync(
:vartype operation_status: azure.mgmt.storagesync.operations.OperationStatusOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
- :param subscription_id: The ID of the target subscription. Required.
+ :param subscription_id: The ID of the target subscription. The value must be an UUID. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
- :keyword api_version: Api Version. Default value is "2022-06-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2022-09-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
@@ -84,9 +84,27 @@ def __init__(
self._config = MicrosoftStorageSyncConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
- self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
-
- client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ ARMAutoResourceProviderRegistrationPolicy(),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, policies=_policies, **kwargs)
+
+ client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
@@ -113,7 +131,7 @@ def __init__(
self._client, self._config, self._serialize, self._deserialize
)
- def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
+ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
@@ -133,17 +151,14 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
- return self._client.send_request(request_copy, **kwargs)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
- def close(self):
- # type: () -> None
+ def close(self) -> None:
self._client.close()
- def __enter__(self):
- # type: () -> MicrosoftStorageSync
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
- def __exit__(self, *exc_details):
- # type: (Any) -> None
+ def __exit__(self, *exc_details: Any) -> None:
self._client.__exit__(*exc_details)
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_serialization.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_serialization.py
index 7c1dedb5133d..ce17d1798ce7 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_serialization.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,7 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
+# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
import calendar
@@ -37,23 +38,37 @@
import re
import sys
import codecs
+from typing import (
+ Dict,
+ Any,
+ cast,
+ Optional,
+ Union,
+ AnyStr,
+ IO,
+ Mapping,
+ Callable,
+ TypeVar,
+ MutableMapping,
+ Type,
+ List,
+)
try:
from urllib import quote # type: ignore
except ImportError:
- from urllib.parse import quote # type: ignore
+ from urllib.parse import quote
import xml.etree.ElementTree as ET
-import isodate
+import isodate # type: ignore
-from typing import Dict, Any, cast, TYPE_CHECKING
-
-from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
-if TYPE_CHECKING:
- from typing import Optional, Union, AnyStr, IO, Mapping
+ModelType = TypeVar("ModelType", bound="Model")
+JSON = MutableMapping[str, Any]
class RawDeserializer:
@@ -65,8 +80,7 @@ class RawDeserializer:
CONTEXT_NAME = "deserialized_data"
@classmethod
- def deserialize_from_text(cls, data, content_type=None):
- # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any
+ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
"""Decode data according to content-type.
Accept a stream of data as well, but will be load at once in memory for now.
@@ -76,6 +90,8 @@ def deserialize_from_text(cls, data, content_type=None):
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -97,7 +113,7 @@ def deserialize_from_text(cls, data, content_type=None):
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -109,7 +125,7 @@ def deserialize_from_text(cls, data, content_type=None):
pass
return ET.fromstring(data_as_str) # nosec
- except ET.ParseError:
+ except ET.ParseError as err:
# It might be because the server has an issue, and returned JSON with
# content-type XML....
# So let's try a JSON load, and if it's still broken
@@ -128,17 +144,23 @@ def _json_attemp(data):
# The function hack is because Py2.7 messes up with exception
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
- raise_with_traceback(DeserializationError, "XML is invalid")
+ raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
- def deserialize_from_http_generics(cls, body_bytes, headers):
- # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any
+ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
"""Deserialize from HTTP response.
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -156,13 +178,6 @@ def deserialize_from_http_generics(cls, body_bytes, headers):
return None
-try:
- basestring # type: ignore
- unicode_str = unicode # type: ignore
-except NameError:
- basestring = str # type: ignore
- unicode_str = str # type: ignore
-
_LOGGER = logging.getLogger(__name__)
try:
@@ -175,20 +190,35 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
try:
- from datetime import timezone as _FixedOffset
+ from datetime import timezone as _FixedOffset # type: ignore
except ImportError: # Python 2.7
class _FixedOffset(datetime.tzinfo): # type: ignore
@@ -197,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -219,31 +249,33 @@ def __getinitargs__(self):
try:
from datetime import timezone
- TZ_UTC = timezone.utc # type: ignore
+ TZ_UTC = timezone.utc
except ImportError:
TZ_UTC = UTC() # type: ignore
_FLATTEN = re.compile(r"(? None:
+ self.additional_properties: Optional[Dict[str, Any]] = {}
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -290,43 +329,57 @@ def __init__(self, **kwargs):
else:
setattr(self, k, kwargs[k])
- def __eq__(self, other):
- """Compare objects by comparing all attributes."""
+ def __eq__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
- def __ne__(self, other):
- """Compare objects by comparing all attributes."""
+ def __ne__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
- def __str__(self):
+ def __str__(self) -> str:
return str(self.__dict__)
@classmethod
- def enable_additional_properties_sending(cls):
+ def enable_additional_properties_sending(cls) -> None:
cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
@classmethod
- def is_xml_model(cls):
+ def is_xml_model(cls) -> bool:
try:
- cls._xml_map
+ cls._xml_map # type: ignore
except AttributeError:
return False
return True
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
- xml_map = cls._xml_map
+ xml_map = cls._xml_map # type: ignore
except AttributeError:
xml_map = {}
return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
- def serialize(self, keep_readonly=False, **kwargs):
- """Return the JSON that would be sent to azure from this model.
+ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+ """Return the JSON that would be sent to server from this model.
This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
@@ -337,10 +390,17 @@ def serialize(self, keep_readonly=False, **kwargs):
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
- def as_dict(self, keep_readonly=True, key_transformer=attribute_transformer, **kwargs):
- """Return a dict that can be JSONify using json.dump.
+ def as_dict(
+ self,
+ keep_readonly: bool = True,
+ key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+ **kwargs: Any
+ ) -> JSON:
+ """Return a dict that can be serialized using json.dump.
Advanced usage might optionally use a callback as parameter:
@@ -366,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -381,25 +444,31 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@classmethod
- def deserialize(cls, data, content_type=None):
+ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType:
"""Parse a str using the RestAPI syntax and return a model.
:param str data: A str using RestAPI structure. JSON by default.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@classmethod
- def from_dict(cls, data, key_extractors=None, content_type=None):
+ def from_dict(
+ cls: Type[ModelType],
+ data: Any,
+ key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+ content_type: Optional[str] = None,
+ ) -> ModelType:
"""Parse a dict using given key extractor return a model.
By default consider key
@@ -407,13 +476,15 @@ def from_dict(cls, data, key_extractors=None, content_type=None):
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
- deserializer.key_extractors = (
- [
+ deserializer.key_extractors = ( # type: ignore
+ [ # type: ignore
attribute_key_case_insensitive_extractor,
rest_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
@@ -421,7 +492,7 @@ def from_dict(cls, data, key_extractors=None, content_type=None):
if key_extractors is None
else key_extractors
)
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@classmethod
def _flatten_subtype(cls, key, objects):
@@ -429,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -453,7 +528,7 @@ def _classify(cls, response, objects):
return cls
flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
try:
- return objects[flatten_mapping_type[subtype_value]]
+ return objects[flatten_mapping_type[subtype_value]] # type: ignore
except KeyError:
_LOGGER.warning(
"Subtype value %s has no mapping, use base class %s.",
@@ -482,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -521,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes=None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -537,17 +614,20 @@ def __init__(self, classes=None):
"[]": self.serialize_iter,
"{}": self.serialize_dict,
}
- self.dependencies = dict(classes) if classes else {}
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -573,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -605,62 +687,63 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if xml_desc.get("attr", False):
if xml_ns:
ET.register_namespace(xml_prefix, xml_ns)
- xml_name = "{}{}".format(xml_ns, xml_name)
- serialized.set(xml_name, new_attr)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+ serialized.set(xml_name, new_attr) # type: ignore
continue
if xml_desc.get("text", False):
- serialized.text = new_attr
+ serialized.text = new_attr # type: ignore
continue
if isinstance(new_attr, list):
- serialized.extend(new_attr)
+ serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
new_attr.tag = "}".join([splitted_tag[0], xml_name])
else:
new_attr.tag = xml_name
- serialized.append(new_attr)
+ serialized.append(new_attr) # type: ignore
else: # That's a basic type
# Integrate namespace if necessary
local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
- local_node.text = unicode_str(new_attr)
- serialized.append(local_node)
+ local_node.text = str(new_attr)
+ serialized.append(local_node) # type: ignore
else: # JSON
- for k in reversed(keys):
- unflattened = {k: new_attr}
- new_attr = unflattened
+ for k in reversed(keys): # type: ignore
+ new_attr = {k: new_attr}
_new_attr = new_attr
_serialized = serialized
- for k in keys:
+ for k in keys: # type: ignore
if k not in _serialized:
- _serialized.update(_new_attr)
- _new_attr = _new_attr[k]
+ _serialized.update(_new_attr) # type: ignore
+ _new_attr = _new_attr[k] # type: ignore
_serialized = _serialized[k]
- except ValueError:
- continue
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
- raise_with_traceback(SerializationError, msg, err)
- else:
- return serialized
+ raise SerializationError(msg) from err
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
- internal_data_type = data_type.strip("[]{}")
- internal_data_type = self.dependencies.get(internal_data_type, None)
+ internal_data_type_str = data_type.strip("[]{}")
+ internal_data_type = self.dependencies.get(internal_data_type_str, None)
try:
is_xml_model_serialization = kwargs["is_xml"]
except KeyError:
@@ -675,7 +758,7 @@ def body(self, data, data_type, **kwargs):
# We're not able to deal with additional properties for now.
deserializer.additional_properties_detection = False
if is_xml_model_serialization:
- deserializer.key_extractors = [
+ deserializer.key_extractors = [ # type: ignore
attribute_key_case_insensitive_extractor,
]
else:
@@ -684,18 +767,20 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
- raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err)
+ raise SerializationError("Unable to build a model: " + str(err)) from err
return self._serialize(data, data_type, **kwargs)
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -706,30 +791,30 @@ def url(self, name, data, data_type, **kwargs):
if kwargs.get("skip_quote") is True:
output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :rtype: str
+ :rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
if data_type.startswith("["):
internal_data_type = data_type[1:-1]
- data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data]
- if not kwargs.get("skip_quote", False):
- data = [quote(str(d), safe="") for d in data]
- return str(self.serialize_iter(data, internal_data_type, **kwargs))
+ do_quote = not kwargs.get("skip_quote", False)
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
# Not a list, regular serialization
output = self.serialize_data(data, data_type, **kwargs)
@@ -739,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -760,30 +846,31 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
try:
+ if data is CoreNull:
+ return None
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -798,12 +885,11 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
- raise_with_traceback(SerializationError, msg.format(data, data_type), err)
- else:
- return self._serialize(data, **kwargs)
+ raise SerializationError(msg.format(data, data_type)) from err
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -819,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param object data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -843,14 +932,13 @@ def serialize_unicode(cls, data):
pass
try:
- if isinstance(data, unicode):
+ if isinstance(data, unicode): # type: ignore
# Don't change it, JSON and XML ElementTree are totally able
# to serialize correctly u'' strings
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -860,13 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
+ :keyword bool do_quote: Whether to quote the serialized elements. Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -878,9 +966,14 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
for d in data:
try:
serialized.append(self.serialize_data(d, iter_type, **kwargs))
- except ValueError:
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
serialized.append(None)
+ if kwargs.get("do_quote", False):
+ serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
+
if div:
serialized = ["" if s is None else str(s) for s in serialized]
serialized = div.join(serialized)
@@ -916,16 +1009,17 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
for key, value in attr.items():
try:
serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
- except ValueError:
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
serialized[self.serialize_unicode(key)] = None
if "xml" in serialization_ctxt:
@@ -940,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -948,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -958,7 +1053,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
if obj_type is _long_type:
return self.serialize_long(attr)
- if obj_type is unicode_str:
+ if obj_type is str:
return self.serialize_unicode(attr)
if obj_type is datetime.datetime:
return self.serialize_iso(attr)
@@ -972,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1001,58 +1096,63 @@ def serialize_enum(attr, enum_obj=None):
except AttributeError:
result = attr
try:
- enum_obj(result)
+ enum_obj(result) # type: ignore
return result
- except ValueError:
- for enum_value in enum_obj:
+ except ValueError as exc:
+ for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param bytearray attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param bytes attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param Decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1060,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1074,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1110,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1135,19 +1239,20 @@ def serialize_iso(attr, **kwargs):
return date + microseconds + "Z"
except (ValueError, OverflowError) as err:
msg = "Unable to serialize datetime object."
- raise_with_traceback(SerializationError, msg, err)
+ raise SerializationError(msg) from err
except AttributeError as err:
msg = "ISO-8601 object must be valid Datetime object."
- raise_with_traceback(TypeError, msg, err)
+ raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1155,16 +1260,17 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
while "." in key:
- dict_keys = _FLATTEN.split(key)
+ # Need the cast, as for some reasons "split" is typed as list[str | Any]
+ dict_keys = cast(List[str], _FLATTEN.split(key))
if len(dict_keys) == 1:
key = _decode_attribute_map_key(dict_keys[0])
break
@@ -1173,14 +1279,15 @@ def rest_key_extractor(attr, attr_desc, data):
if working_data is None:
# If at any point while following flatten JSON path see None, it means
# that all properties under are None as well
- # https://github.com/Azure/msrest-for-python/issues/197
return None
key = ".".join(dict_keys[1:])
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1194,7 +1301,6 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
if working_data is None:
# If at any point while following flatten JSON path see None, it means
# that all properties under are None as well
- # https://github.com/Azure/msrest-for-python/issues/197
return None
key = ".".join(dict_keys[1:])
@@ -1202,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1245,11 +1363,11 @@ def _extract_name_from_internal_type(internal_type):
xml_name = internal_type_xml_map.get("name", internal_type.__name__)
xml_ns = internal_type_xml_map.get("ns", None)
if xml_ns:
- xml_name = "{}{}".format(xml_ns, xml_name)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1269,7 +1387,7 @@ def xml_key_extractor(attr, attr_desc, data):
# Integrate namespace if necessary
xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
if xml_ns:
- xml_name = "{}{}".format(xml_ns, xml_name)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
# If it's an attribute, that's simple
if xml_desc.get("attr", False):
@@ -1301,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1333,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes=None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1355,7 +1472,7 @@ def __init__(self, classes=None):
"duration": (isodate.Duration, datetime.timedelta),
"iso-8601": (datetime.datetime),
}
- self.dependencies = dict(classes) if classes else {}
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
self.key_extractors = [rest_key_extractor, xml_key_extractor]
# Additional properties only works if the "rest_key_extractor" is used to
# extract the keys. Making it to work whatever the key extractor is too much
@@ -1373,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1386,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1408,15 +1527,15 @@ def _deserialize(self, target_obj, data):
response, class_name = self._classify_target(target_obj, data)
- if isinstance(response, basestring):
+ if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1444,11 +1563,10 @@ def _deserialize(self, target_obj, data):
value = self.deserialize_data(raw_value, attr_desc["type"])
d_attrs[attr] = value
except (AttributeError, TypeError, KeyError) as err:
- msg = "Unable to deserialize to object: " + class_name
- raise_with_traceback(DeserializationError, msg, err)
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ msg = "Unable to deserialize to object: " + class_name # type: ignore
+ raise DeserializationError(msg) from err
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1474,22 +1592,24 @@ def _classify_target(self, target, data):
Once classification has been determined, initialize object.
:param str target: The target object type to deserialize to.
- :param str/dict data: The response data to deseralize.
+ :param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
- if isinstance(target, basestring):
+ if isinstance(target, str):
try:
target = self.dependencies[target]
except KeyError:
return target, target
try:
- target = target._classify(data, self.dependencies)
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
- return target, target.__class__.__name__
+ return target, target.__class__.__name__ # type: ignore
def failsafe_deserialize(self, target_obj, data, content_type=None):
"""Ignores any errors encountered in deserialization,
@@ -1499,12 +1619,14 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
a deserialization error.
:param str target_obj: The target object type to deserialize to.
- :param str/dict data: The response data to deseralize.
+ :param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1522,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1542,21 +1666,28 @@ def _unpack_content(raw_data, content_type=None):
if hasattr(raw_data, "_content_consumed"):
return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
- if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"):
- return RawDeserializer.deserialize_from_text(raw_data, content_type)
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
+ return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
return raw_data
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access
+ ]
+ const = [
+ k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
@@ -1565,8 +1696,8 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
response_obj.additional_properties = additional_properties
return response_obj
except TypeError as err:
- msg = "Unable to deserialize {} into model {}. ".format(kwargs, response)
- raise DeserializationError(msg + str(err))
+ msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1575,15 +1706,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1597,7 +1729,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1616,15 +1752,15 @@ def deserialize_data(self, data, data_type):
except (ValueError, TypeError, AttributeError) as err:
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return self._deserialize(obj_type, data)
+ raise DeserializationError(msg) from err
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1641,6 +1777,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1651,11 +1788,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1664,7 +1802,7 @@ def deserialize_object(self, attr, **kwargs):
if isinstance(attr, ET.Element):
# Do no recurse on XML, just return the tree as-is
return attr
- if isinstance(attr, basestring):
+ if isinstance(attr, str):
return self.deserialize_basic(attr, "str")
obj_type = type(attr)
if obj_type in self.basic_types:
@@ -1690,11 +1828,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1702,6 +1839,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1713,24 +1851,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, basestring):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1738,6 +1875,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1747,12 +1885,11 @@ def deserialize_unicode(data):
# Consider this is real string
try:
- if isinstance(data, unicode):
+ if isinstance(data, unicode): # type: ignore
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1764,6 +1901,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1772,12 +1910,11 @@ def deserialize_enum(data, enum_obj):
data = data.value
if isinstance(data, int):
# Workaround. We might consider remove it in the future.
- # https://github.com/Azure/azure-rest-api-specs/issues/141
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1793,25 +1930,27 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
if isinstance(attr, ET.Element):
attr = attr.text
- return bytearray(b64decode(attr))
+ return bytearray(b64decode(attr)) # type: ignore
@staticmethod
def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
if isinstance(attr, ET.Element):
attr = attr.text
- padding = "=" * (3 - (len(attr) + 3) % 4)
- attr = attr + padding
+ padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
+ attr = attr + padding # type: ignore
encoded = attr.replace("-", "+").replace("_", "/")
return b64decode(encoded)
@@ -1820,34 +1959,37 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: ~decimal.Decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
try:
- return decimal.Decimal(attr)
+ return decimal.Decimal(str(attr)) # type: ignore
except decimal.DecimalException as err:
msg = "Invalid decimal {}".format(attr)
- raise_with_traceback(DeserializationError, msg, err)
+ raise DeserializationError(msg) from err
@staticmethod
def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
if isinstance(attr, ET.Element):
attr = attr.text
- return _long_type(attr)
+ return _long_type(attr) # type: ignore
@staticmethod
def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1857,36 +1999,37 @@ def deserialize_duration(attr):
duration = isodate.parse_duration(attr)
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return duration
+ raise DeserializationError(msg) from err
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
if isinstance(attr, ET.Element):
attr = attr.text
- if re.search(r"[^\W\d_]", attr, re.I + re.U):
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
# This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
- return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
@staticmethod
def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
if isinstance(attr, ET.Element):
attr = attr.text
- if re.search(r"[^\W\d_]", attr, re.I + re.U):
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
return isodate.parse_time(attr)
@@ -1895,13 +2038,14 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
if isinstance(attr, ET.Element):
attr = attr.text
try:
- parsed_date = email.utils.parsedate_tz(attr)
+ parsed_date = email.utils.parsedate_tz(attr) # type: ignore
date_obj = datetime.datetime(
*parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
)
@@ -1909,22 +2053,22 @@ def deserialize_rfc(attr):
date_obj = date_obj.astimezone(tz=TZ_UTC)
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
if isinstance(attr, ET.Element):
attr = attr.text
try:
- attr = attr.upper()
+ attr = attr.upper() # type: ignore
match = Deserializer.valid_date.match(attr)
if not match:
raise ValueError("Invalid datetime string: " + attr)
@@ -1946,9 +2090,8 @@ def deserialize_iso(attr):
raise OverflowError("Hit max or min date")
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1956,15 +2099,16 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
if isinstance(attr, ET.Element):
- attr = int(attr.text)
+ attr = int(attr.text) # type: ignore
try:
+ attr = int(attr)
date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_vendor.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_vendor.py
index 1410b4a70c7d..48549962bf5f 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_vendor.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_vendor.py
@@ -8,36 +8,14 @@
from abc import ABC
from typing import TYPE_CHECKING
-from azure.core.pipeline.transport import HttpRequest
-
from ._configuration import MicrosoftStorageSyncConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import PipelineClient
from ._serialization import Deserializer, Serializer
-def _convert_request(request, files=None):
- data = request.content if not files else None
- request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
- if files:
- request.set_formdata_body(files)
- return request
-
-
-def _format_url_section(template, **kwargs):
- components = template.split("/")
- while components:
- try:
- return template.format(**kwargs)
- except KeyError as key:
- formatted_components = template.split("/")
- components = [c for c in formatted_components if "{}".format(key.args[0]) not in c]
- template = "/".join(components)
-
-
class MicrosoftStorageSyncMixinABC(ABC):
"""DO NOT use this class. It is for internal typing use only."""
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_version.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_version.py
index e32dc6ec4218..e5754a47ce68 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_version.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "2.0.0b1"
+VERSION = "1.0.0b1"
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/__init__.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/__init__.py
index a7ff2085286b..6f779987b10e 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/__init__.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._microsoft_storage_sync import MicrosoftStorageSync
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._microsoft_storage_sync import MicrosoftStorageSync # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"MicrosoftStorageSync",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_configuration.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_configuration.py
index 98bf0ad8b26e..a8c6a3b1800d 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_configuration.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_configuration.py
@@ -6,26 +6,18 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-import sys
from typing import Any, TYPE_CHECKING
-from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
-else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
-
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class MicrosoftStorageSyncConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+class MicrosoftStorageSyncConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for MicrosoftStorageSync.
Note that all parameters used to create this instance are saved as instance
@@ -33,16 +25,15 @@ class MicrosoftStorageSyncConfiguration(Configuration): # pylint: disable=too-m
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :param subscription_id: The ID of the target subscription. Required.
+ :param subscription_id: The ID of the target subscription. The value must be a UUID. Required.
:type subscription_id: str
- :keyword api_version: Api Version. Default value is "2022-06-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2022-09-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
- super(MicrosoftStorageSyncConfiguration, self).__init__(**kwargs)
- api_version = kwargs.pop("api_version", "2022-06-01") # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", "2022-09-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
@@ -54,6 +45,7 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k
self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-storagesync/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
self._configure(**kwargs)
def _configure(self, **kwargs: Any) -> None:
@@ -62,9 +54,9 @@ def _configure(self, **kwargs: Any) -> None:
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_microsoft_storage_sync.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_microsoft_storage_sync.py
index 5bfff3b37260..cc82aad689a3 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_microsoft_storage_sync.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_microsoft_storage_sync.py
@@ -8,11 +8,14 @@
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
+from typing_extensions import Self
+from azure.core.pipeline import policies
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
+from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy
-from .. import models
+from .. import models as _models
from .._serialization import Deserializer, Serializer
from ._configuration import MicrosoftStorageSyncConfiguration
from .operations import (
@@ -30,14 +33,11 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class MicrosoftStorageSync(
- MicrosoftStorageSyncOperationsMixin
-): # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
- """Microsoft Storage Sync Service API.
+class MicrosoftStorageSync(MicrosoftStorageSyncOperationsMixin): # pylint: disable=too-many-instance-attributes
+ """Microsoft Storage Sync Service API. This belongs to Microsoft.StorageSync Resource Provider.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.storagesync.aio.operations.Operations
@@ -64,11 +64,11 @@ class MicrosoftStorageSync(
:vartype operation_status: azure.mgmt.storagesync.aio.operations.OperationStatusOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :param subscription_id: The ID of the target subscription. Required.
+ :param subscription_id: The ID of the target subscription. The value must be a UUID. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
- :keyword api_version: Api Version. Default value is "2022-06-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2022-09-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
@@ -85,9 +85,27 @@ def __init__(
self._config = MicrosoftStorageSyncConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
- self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
-
- client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ AsyncARMAutoResourceProviderRegistrationPolicy(),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, policies=_policies, **kwargs)
+
+ client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
@@ -114,7 +132,9 @@ def __init__(
self._client, self._config, self._serialize, self._deserialize
)
- def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
+ def _send_request(
+ self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
+ ) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
@@ -134,14 +154,14 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncH
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
- return self._client.send_request(request_copy, **kwargs)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "MicrosoftStorageSync":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
- async def __aexit__(self, *exc_details) -> None:
+ async def __aexit__(self, *exc_details: Any) -> None:
await self._client.__aexit__(*exc_details)
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_vendor.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_vendor.py
index 79f8d1a6b4c0..a04ef1ec4263 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_vendor.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/_vendor.py
@@ -8,12 +8,9 @@
from abc import ABC
from typing import TYPE_CHECKING
-from azure.core.pipeline.transport import HttpRequest
-
from ._configuration import MicrosoftStorageSyncConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import AsyncPipelineClient
from .._serialization import Deserializer, Serializer
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/__init__.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/__init__.py
index 379e4078c187..465f08bed980 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/__init__.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/__init__.py
@@ -5,21 +5,27 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import Operations
-from ._storage_sync_services_operations import StorageSyncServicesOperations
-from ._private_link_resources_operations import PrivateLinkResourcesOperations
-from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
-from ._sync_groups_operations import SyncGroupsOperations
-from ._cloud_endpoints_operations import CloudEndpointsOperations
-from ._server_endpoints_operations import ServerEndpointsOperations
-from ._registered_servers_operations import RegisteredServersOperations
-from ._workflows_operations import WorkflowsOperations
-from ._operation_status_operations import OperationStatusOperations
-from ._microsoft_storage_sync_operations import MicrosoftStorageSyncOperationsMixin
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import Operations # type: ignore
+from ._storage_sync_services_operations import StorageSyncServicesOperations # type: ignore
+from ._private_link_resources_operations import PrivateLinkResourcesOperations # type: ignore
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations # type: ignore
+from ._sync_groups_operations import SyncGroupsOperations # type: ignore
+from ._cloud_endpoints_operations import CloudEndpointsOperations # type: ignore
+from ._server_endpoints_operations import ServerEndpointsOperations # type: ignore
+from ._registered_servers_operations import RegisteredServersOperations # type: ignore
+from ._workflows_operations import WorkflowsOperations # type: ignore
+from ._operation_status_operations import OperationStatusOperations # type: ignore
+from ._microsoft_storage_sync_operations import MicrosoftStorageSyncOperationsMixin # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -35,5 +41,5 @@
"OperationStatusOperations",
"MicrosoftStorageSyncOperationsMixin",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_cloud_endpoints_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_cloud_endpoints_operations.py
index 76ded6133994..6734c04905de 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_cloud_endpoints_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_cloud_endpoints_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +18,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +32,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._cloud_endpoints_operations import (
build_afs_share_metadata_certificate_public_keys_request,
build_create_request,
@@ -44,12 +45,11 @@
build_restoreheartbeat_request,
build_trigger_change_detection_request,
)
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -79,10 +79,10 @@ async def _create_initial(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.CloudEndpointCreateParameters, IO],
+ parameters: Union[_models.CloudEndpointCreateParameters, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.CloudEndpoint]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -93,21 +93,19 @@ async def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.CloudEndpoint]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "CloudEndpointCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -117,25 +115,28 @@ async def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -143,8 +144,6 @@ async def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("CloudEndpoint", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -156,12 +155,12 @@ async def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_create(
@@ -191,14 +190,6 @@ async def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
@@ -212,7 +203,7 @@ async def begin_create(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -229,18 +220,10 @@ async def begin_create(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
@@ -254,7 +237,7 @@ async def begin_create(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.CloudEndpointCreateParameters, IO],
+ parameters: Union[_models.CloudEndpointCreateParameters, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.CloudEndpoint]:
"""Create a new CloudEndpoint.
@@ -268,20 +251,9 @@ async def begin_create(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Cloud Endpoint resource. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.CloudEndpointCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Cloud Endpoint resource. Is either a CloudEndpointCreateParameters
+ type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.CloudEndpointCreateParameters or IO[bytes]
:return: An instance of AsyncLROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
@@ -290,16 +262,14 @@ async def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpoint]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.CloudEndpoint] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._create_initial( # type: ignore
+ raw_result = await self._create_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -312,6 +282,7 @@ async def begin_create(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -322,27 +293,27 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("CloudEndpoint", pipeline_response)
+ deserialized = self._deserialize("CloudEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.CloudEndpoint].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
+ return AsyncLROPoller[_models.CloudEndpoint](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace_async
async def get(
@@ -364,12 +335,11 @@ async def get(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CloudEndpoint or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CloudEndpoint
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -380,27 +350,24 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpoint]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.CloudEndpoint] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -416,24 +383,22 @@ async def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("CloudEndpoint", pipeline_response)
+ deserialized = self._deserialize("CloudEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
+ return deserialized # type: ignore
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
-
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -444,32 +409,34 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -492,10 +459,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -517,14 +486,6 @@ async def begin_delete(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -532,15 +493,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -551,28 +510,27 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_by_sync_group(
@@ -587,7 +545,6 @@ def list_by_sync_group(
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CloudEndpoint or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.CloudEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -595,12 +552,10 @@ def list_by_sync_group(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpointArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.CloudEndpointArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -611,18 +566,16 @@ def list_by_sync_group(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_sync_group_request(
+ _request = build_list_by_sync_group_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_sync_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -634,26 +587,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("CloudEndpointArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -666,18 +619,16 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_sync_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints"} # type: ignore
-
- async def _pre_backup_initial( # pylint: disable=inconsistent-return-statements
+ async def _pre_backup_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.BackupRequest, IO],
+ parameters: Union[_models.BackupRequest, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -688,21 +639,19 @@ async def _pre_backup_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "BackupRequest")
- request = build_pre_backup_request(
+ _request = build_pre_backup_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -712,43 +661,41 @@ async def _pre_backup_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._pre_backup_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
- if response.status_code == 200:
- response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-correlation-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-correlation-request-id")
- )
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
- if response.status_code == 202:
- response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-correlation-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-correlation-request-id")
- )
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _pre_backup_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prebackup"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_pre_backup(
@@ -778,14 +725,6 @@ async def begin_pre_backup(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -798,7 +737,7 @@ async def begin_pre_backup(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -815,18 +754,10 @@ async def begin_pre_backup(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -839,7 +770,7 @@ async def begin_pre_backup(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.BackupRequest, IO],
+ parameters: Union[_models.BackupRequest, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Pre Backup a given CloudEndpoint.
@@ -853,19 +784,9 @@ async def begin_pre_backup(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Backup request. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Backup request. Is either a BackupRequest type or an IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -873,16 +794,14 @@ async def begin_pre_backup(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._pre_backup_initial( # type: ignore
+ raw_result = await self._pre_backup_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -895,28 +814,27 @@ async def begin_pre_backup(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_pre_backup.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prebackup"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
async def _post_backup_initial(
self,
@@ -924,10 +842,10 @@ async def _post_backup_initial(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.BackupRequest, IO],
+ parameters: Union[_models.BackupRequest, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.PostBackupResponse]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -938,21 +856,19 @@ async def _post_backup_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PostBackupResponse]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "BackupRequest")
- request = build_post_backup_request(
+ _request = build_post_backup_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -962,48 +878,41 @@ async def _post_backup_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._post_backup_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-correlation-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-correlation-request-id")
- )
-
- deserialized = self._deserialize("PostBackupResponse", pipeline_response)
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
- if response.status_code == 202:
- response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-correlation-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-correlation-request-id")
- )
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _post_backup_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postbackup"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_post_backup(
@@ -1033,14 +942,6 @@ async def begin_post_backup(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
@@ -1054,7 +955,7 @@ async def begin_post_backup(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1071,18 +972,10 @@ async def begin_post_backup(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
@@ -1096,7 +989,7 @@ async def begin_post_backup(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.BackupRequest, IO],
+ parameters: Union[_models.BackupRequest, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.PostBackupResponse]:
"""Post Backup a given CloudEndpoint.
@@ -1110,19 +1003,9 @@ async def begin_post_backup(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Backup request. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Backup request. Is either a BackupRequest type or a IO[bytes] type.
+ Required.
+ :type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO[bytes]
:return: An instance of AsyncLROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
@@ -1131,16 +1014,14 @@ async def begin_post_backup(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PostBackupResponse]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.PostBackupResponse] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._post_backup_initial( # type: ignore
+ raw_result = await self._post_backup_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1153,6 +1034,7 @@ async def begin_post_backup(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -1164,38 +1046,38 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("PostBackupResponse", pipeline_response)
+ deserialized = self._deserialize("PostBackupResponse", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.PostBackupResponse].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_post_backup.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postbackup"} # type: ignore
+ return AsyncLROPoller[_models.PostBackupResponse](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _pre_restore_initial( # pylint: disable=inconsistent-return-statements
+ async def _pre_restore_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.PreRestoreRequest, IO],
+ parameters: Union[_models.PreRestoreRequest, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1206,21 +1088,19 @@ async def _pre_restore_initial( # pylint: disable=inconsistent-return-statement
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PreRestoreRequest")
- request = build_pre_restore_request(
+ _request = build_pre_restore_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1230,20 +1110,24 @@ async def _pre_restore_initial( # pylint: disable=inconsistent-return-statement
content_type=content_type,
json=_json,
content=_content,
- template_url=self._pre_restore_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -1256,10 +1140,12 @@ async def _pre_restore_initial( # pylint: disable=inconsistent-return-statement
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _pre_restore_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prerestore"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_pre_restore(
@@ -1289,14 +1175,6 @@ async def begin_pre_restore(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1309,7 +1187,7 @@ async def begin_pre_restore(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1326,18 +1204,10 @@ async def begin_pre_restore(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1350,7 +1220,7 @@ async def begin_pre_restore(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.PreRestoreRequest, IO],
+ parameters: Union[_models.PreRestoreRequest, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Pre Restore a given CloudEndpoint.
@@ -1364,20 +1234,9 @@ async def begin_pre_restore(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Cloud Endpoint object. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.PreRestoreRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Cloud Endpoint object. Is either a PreRestoreRequest type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.PreRestoreRequest or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1385,16 +1244,14 @@ async def begin_pre_restore(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._pre_restore_initial( # type: ignore
+ raw_result = await self._pre_restore_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1407,31 +1264,30 @@ async def begin_pre_restore(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_pre_restore.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prerestore"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace_async
- async def restoreheartbeat( # pylint: disable=inconsistent-return-statements
+ async def restoreheartbeat(
self,
resource_group_name: str,
storage_sync_service_name: str,
@@ -1450,12 +1306,11 @@ async def restoreheartbeat( # pylint: disable=inconsistent-return-statements
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1466,27 +1321,24 @@ async def restoreheartbeat( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_restoreheartbeat_request(
+ _request = build_restoreheartbeat_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.restoreheartbeat.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1503,20 +1355,18 @@ async def restoreheartbeat( # pylint: disable=inconsistent-return-statements
)
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, None, response_headers) # type: ignore
- restoreheartbeat.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/restoreheartbeat"} # type: ignore
-
- async def _post_restore_initial( # pylint: disable=inconsistent-return-statements
+ async def _post_restore_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.PostRestoreRequest, IO],
+ parameters: Union[_models.PostRestoreRequest, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1527,21 +1377,19 @@ async def _post_restore_initial( # pylint: disable=inconsistent-return-statemen
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PostRestoreRequest")
- request = build_post_restore_request(
+ _request = build_post_restore_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1551,20 +1399,24 @@ async def _post_restore_initial( # pylint: disable=inconsistent-return-statemen
content_type=content_type,
json=_json,
content=_content,
- template_url=self._post_restore_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -1577,10 +1429,12 @@ async def _post_restore_initial( # pylint: disable=inconsistent-return-statemen
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _post_restore_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postrestore"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_post_restore(
@@ -1610,14 +1464,6 @@ async def begin_post_restore(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1630,7 +1476,7 @@ async def begin_post_restore(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1647,18 +1493,10 @@ async def begin_post_restore(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1671,7 +1509,7 @@ async def begin_post_restore(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.PostRestoreRequest, IO],
+ parameters: Union[_models.PostRestoreRequest, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Post Restore a given CloudEndpoint.
@@ -1685,20 +1523,9 @@ async def begin_post_restore(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Cloud Endpoint object. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.PostRestoreRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Cloud Endpoint object. Is either a PostRestoreRequest type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.PostRestoreRequest or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1706,16 +1533,14 @@ async def begin_post_restore(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._post_restore_initial( # type: ignore
+ raw_result = await self._post_restore_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1728,39 +1553,38 @@ async def begin_post_restore(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_post_restore.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postrestore"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _trigger_change_detection_initial( # pylint: disable=inconsistent-return-statements
+ async def _trigger_change_detection_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.TriggerChangeDetectionParameters, IO],
+ parameters: Union[_models.TriggerChangeDetectionParameters, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1771,21 +1595,19 @@ async def _trigger_change_detection_initial( # pylint: disable=inconsistent-ret
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "TriggerChangeDetectionParameters")
- request = build_trigger_change_detection_request(
+ _request = build_trigger_change_detection_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1795,20 +1617,24 @@ async def _trigger_change_detection_initial( # pylint: disable=inconsistent-ret
content_type=content_type,
json=_json,
content=_content,
- template_url=self._trigger_change_detection_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -1821,10 +1647,12 @@ async def _trigger_change_detection_initial( # pylint: disable=inconsistent-ret
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _trigger_change_detection_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/triggerChangeDetection"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_trigger_change_detection(
@@ -1855,14 +1683,6 @@ async def begin_trigger_change_detection(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1875,7 +1695,7 @@ async def begin_trigger_change_detection(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1893,18 +1713,10 @@ async def begin_trigger_change_detection(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Trigger Change Detection Action parameters. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1917,7 +1729,7 @@ async def begin_trigger_change_detection(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.TriggerChangeDetectionParameters, IO],
+ parameters: Union[_models.TriggerChangeDetectionParameters, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Triggers detection of changes performed on Azure File share connected to the specified Azure
@@ -1932,20 +1744,9 @@ async def begin_trigger_change_detection(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Trigger Change Detection Action parameters. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.TriggerChangeDetectionParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Trigger Change Detection Action parameters. Is either a
+ TriggerChangeDetectionParameters type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.TriggerChangeDetectionParameters or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1953,16 +1754,14 @@ async def begin_trigger_change_detection(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._trigger_change_detection_initial( # type: ignore
+ raw_result = await self._trigger_change_detection_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1975,31 +1774,30 @@ async def begin_trigger_change_detection(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_trigger_change_detection.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/triggerChangeDetection"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace_async
- async def afs_share_metadata_certificate_public_keys(
+ async def afs_share_metadata_certificate_public_keys( # pylint: disable=name-too-long
self,
resource_group_name: str,
storage_sync_service_name: str,
@@ -2018,12 +1816,11 @@ async def afs_share_metadata_certificate_public_keys(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CloudEndpointAfsShareMetadataCertificatePublicKeys or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CloudEndpointAfsShareMetadataCertificatePublicKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2034,27 +1831,24 @@ async def afs_share_metadata_certificate_public_keys(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpointAfsShareMetadataCertificatePublicKeys]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.CloudEndpointAfsShareMetadataCertificatePublicKeys] = kwargs.pop("cls", None)
- request = build_afs_share_metadata_certificate_public_keys_request(
+ _request = build_afs_share_metadata_certificate_public_keys_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.afs_share_metadata_certificate_public_keys.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -2070,11 +1864,11 @@ async def afs_share_metadata_certificate_public_keys(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("CloudEndpointAfsShareMetadataCertificatePublicKeys", pipeline_response)
+ deserialized = self._deserialize(
+ "CloudEndpointAfsShareMetadataCertificatePublicKeys", pipeline_response.http_response
+ )
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- afs_share_metadata_certificate_public_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/afsShareMetadataCertificatePublicKeys"} # type: ignore
+ return deserialized # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_microsoft_storage_sync_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_microsoft_storage_sync_operations.py
index 2d97bed8aa7b..4a05a9743137 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_microsoft_storage_sync_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_microsoft_storage_sync_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -18,26 +17,25 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._microsoft_storage_sync_operations import build_location_operation_status_request
from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class MicrosoftStorageSyncOperationsMixin(MicrosoftStorageSyncMixinABC):
+
@distributed_trace_async
async def location_operation_status(
self, location_name: str, operation_id: str, **kwargs: Any
@@ -48,12 +46,11 @@ async def location_operation_status(
:type location_name: str
:param operation_id: operation Id. Required.
:type operation_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: LocationOperationStatus or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.LocationOperationStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -64,25 +61,22 @@ async def location_operation_status(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.LocationOperationStatus]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.LocationOperationStatus] = kwargs.pop("cls", None)
- request = build_location_operation_status_request(
+ _request = build_location_operation_status_request(
location_name=location_name,
operation_id=operation_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.location_operation_status.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -98,11 +92,9 @@ async def location_operation_status(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("LocationOperationStatus", pipeline_response)
+ deserialized = self._deserialize("LocationOperationStatus", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- location_operation_status.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StorageSync/locations/{locationName}/operations/{operationId}"} # type: ignore
+ return deserialized # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_operation_status_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_operation_status_operations.py
index 4bc17c675572..729e7362f771 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_operation_status_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_operation_status_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -18,21 +17,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._operation_status_operations import build_get_request
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -71,12 +67,11 @@ async def get(
:type workflow_id: str
:param operation_id: operation Id. Required.
:type operation_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationStatus or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.OperationStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -87,27 +82,24 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationStatus]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.OperationStatus] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
location_name=location_name,
workflow_id=workflow_id,
operation_id=operation_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -123,11 +115,9 @@ async def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("OperationStatus", pipeline_response)
+ deserialized = self._deserialize("OperationStatus", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/locations/{locationName}/workflows/{workflowId}/operations/{operationId}"} # type: ignore
+ return deserialized # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_operations.py
index 6357911d345f..f0a1663fd72d 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,21 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._operations import build_list_request
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -62,7 +58,6 @@ def __init__(self, *args, **kwargs) -> None:
def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationEntity"]:
"""Lists all of the available Storage Sync Rest API operations.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationEntity or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.OperationEntity]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -70,12 +65,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationEntity"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationEntityListResult]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.OperationEntityListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -86,14 +79,12 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.OperationEntity"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -105,26 +96,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("OperationEntityListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -136,5 +127,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list.metadata = {"url": "/providers/Microsoft.StorageSync/operations"} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_private_endpoint_connections_operations.py
index 88d6ba802a61..3258b7a7d3ad 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_private_endpoint_connections_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_private_endpoint_connections_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,19 +31,17 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._private_endpoint_connections_operations import (
build_create_request,
build_delete_request,
build_get_request,
build_list_by_storage_sync_service_request,
)
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -85,12 +84,11 @@ async def get(
:param private_endpoint_connection_name: The name of the private endpoint connection associated
with the Azure resource. Required.
:type private_endpoint_connection_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -101,26 +99,23 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -130,24 +125,22 @@ async def get(
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return deserialized # type: ignore
async def _create_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
private_endpoint_connection_name: str,
- properties: Union[_models.PrivateEndpointConnection, IO],
+ properties: Union[_models.PrivateEndpointConnection, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.PrivateEndpointConnection]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -158,21 +151,19 @@ async def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PrivateEndpointConnection]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(properties, (IO, bytes)):
+ if isinstance(properties, (IOBase, bytes)):
_content = properties
else:
_json = self._serialize.body(properties, "PrivateEndpointConnection")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -181,29 +172,29 @@ async def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -215,12 +206,12 @@ async def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_create(
@@ -250,14 +241,6 @@ async def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
result of cls(response)
:rtype:
@@ -271,7 +254,7 @@ async def begin_create(
resource_group_name: str,
storage_sync_service_name: str,
private_endpoint_connection_name: str,
- properties: IO,
+ properties: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -289,18 +272,10 @@ async def begin_create(
with the Azure resource. Required.
:type private_endpoint_connection_name: str
:param properties: The private endpoint connection properties. Required.
- :type properties: IO
+ :type properties: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
result of cls(response)
:rtype:
@@ -314,7 +289,7 @@ async def begin_create(
resource_group_name: str,
storage_sync_service_name: str,
private_endpoint_connection_name: str,
- properties: Union[_models.PrivateEndpointConnection, IO],
+ properties: Union[_models.PrivateEndpointConnection, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
"""Update the state of specified private endpoint connection associated with the storage sync
@@ -329,20 +304,9 @@ async def begin_create(
:param private_endpoint_connection_name: The name of the private endpoint connection associated
with the Azure resource. Required.
:type private_endpoint_connection_name: str
- :param properties: The private endpoint connection properties. Is either a model type or a IO
- type. Required.
- :type properties: ~azure.mgmt.storagesync.models.PrivateEndpointConnection or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param properties: The private endpoint connection properties. Is either a
+ PrivateEndpointConnection type or a IO[bytes] type. Required.
+ :type properties: ~azure.mgmt.storagesync.models.PrivateEndpointConnection or IO[bytes]
:return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
result of cls(response)
:rtype:
@@ -352,16 +316,14 @@ async def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._create_initial( # type: ignore
+ raw_result = await self._create_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -373,39 +335,40 @@ async def begin_create(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.PrivateEndpointConnection].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return AsyncLROPoller[_models.PrivateEndpointConnection](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
private_endpoint_connection_name: str,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -416,31 +379,33 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -457,10 +422,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -481,14 +448,6 @@ async def begin_delete(
:param private_endpoint_connection_name: The name of the private endpoint connection associated
with the Azure resource. Required.
:type private_endpoint_connection_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -496,15 +455,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -514,28 +471,27 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_by_storage_sync_service(
@@ -548,7 +504,6 @@ def list_by_storage_sync_service(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PrivateEndpointConnection or the result of
cls(response)
:rtype:
@@ -558,12 +513,10 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionListResult]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.PrivateEndpointConnectionListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -574,17 +527,15 @@ def list_by_storage_sync_service(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -596,26 +547,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -627,5 +578,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections"} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_private_link_resources_operations.py
index db09221095c7..d2ba44dd98cc 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_private_link_resources_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_private_link_resources_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -18,21 +17,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._private_link_resources_operations import build_list_by_storage_sync_service_request
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -68,12 +64,11 @@ async def list_by_storage_sync_service(
:param storage_sync_service_name: The name of the storage sync service name within the
specified resource group. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateLinkResourceListResult or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.PrivateLinkResourceListResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -84,38 +79,34 @@ async def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateLinkResourceListResult]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None)
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response)
+ deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateLinkResources"} # type: ignore
+ return deserialized # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_registered_servers_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_registered_servers_operations.py
index 94a9e863db42..1f735e512cba 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_registered_servers_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_registered_servers_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +18,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,20 +32,19 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._registered_servers_operations import (
build_create_request,
build_delete_request,
build_get_request,
build_list_by_storage_sync_service_request,
build_trigger_rollover_request,
+ build_update_request,
)
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -78,7 +79,6 @@ def list_by_storage_sync_service(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RegisteredServer or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.RegisteredServer]
@@ -87,12 +87,10 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.RegisteredServerArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.RegisteredServerArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -103,17 +101,15 @@ def list_by_storage_sync_service(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -125,26 +121,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RegisteredServerArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -157,8 +153,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers"} # type: ignore
-
@distributed_trace_async
async def get(
self, resource_group_name: str, storage_sync_service_name: str, server_id: str, **kwargs: Any
@@ -172,12 +166,11 @@ async def get(
:type storage_sync_service_name: str
:param server_id: GUID identifying the on-premises server. Required.
:type server_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: RegisteredServer or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.RegisteredServer
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -188,26 +181,23 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.RegisteredServer]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.RegisteredServer] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -223,24 +213,22 @@ async def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("RegisteredServer", pipeline_response)
+ deserialized = self._deserialize("RegisteredServer", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return deserialized # type: ignore
async def _create_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: Union[_models.RegisteredServerCreateParameters, IO],
+ parameters: Union[_models.RegisteredServerCreateParameters, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.RegisteredServer]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -251,21 +239,19 @@ async def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.RegisteredServer]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "RegisteredServerCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -274,25 +260,28 @@ async def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -300,8 +289,6 @@ async def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("RegisteredServer", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -312,12 +299,12 @@ async def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_create(
@@ -344,14 +331,6 @@ async def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RegisteredServer or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
@@ -364,7 +343,7 @@ async def begin_create(
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -379,18 +358,10 @@ async def begin_create(
:param server_id: GUID identifying the on-premises server. Required.
:type server_id: str
:param parameters: Body of Registered Server object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RegisteredServer or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
@@ -403,7 +374,7 @@ async def begin_create(
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: Union[_models.RegisteredServerCreateParameters, IO],
+ parameters: Union[_models.RegisteredServerCreateParameters, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.RegisteredServer]:
"""Add a new registered server.
@@ -415,20 +386,238 @@ async def begin_create(
:type storage_sync_service_name: str
:param server_id: GUID identifying the on-premises server. Required.
:type server_id: str
- :param parameters: Body of Registered Server object. Is either a model type or a IO type.
+ :param parameters: Body of Registered Server object. Is either a
+ RegisteredServerCreateParameters type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.RegisteredServerCreateParameters or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns either RegisteredServer or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.RegisteredServer] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._create_initial(
+ resource_group_name=resource_group_name,
+ storage_sync_service_name=storage_sync_service_name,
+ server_id=server_id,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ response_headers = {}
+ response = pipeline_response.http_response
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
+
+ deserialized = self._deserialize("RegisteredServer", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.RegisteredServer].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.RegisteredServer](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ async def _update_initial(
+ self,
+ resource_group_name: str,
+ storage_sync_service_name: str,
+ server_id: str,
+ parameters: Union[_models.RegisteredServerUpdateParameters, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "RegisteredServerUpdateParameters")
+
+ _request = build_update_request(
+ resource_group_name=resource_group_name,
+ storage_sync_service_name=storage_sync_service_name,
+ server_id=server_id,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
+
+ if response.status_code == 202:
+ response_headers["Azure-AsyncOperation"] = self._deserialize(
+ "str", response.headers.get("Azure-AsyncOperation")
+ )
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ storage_sync_service_name: str,
+ server_id: str,
+ parameters: _models.RegisteredServerUpdateParameters,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.RegisteredServer]:
+ """Update registered server.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param storage_sync_service_name: Name of Storage Sync Service resource. Required.
+ :type storage_sync_service_name: str
+ :param server_id: GUID identifying the on-premises server. Required.
+ :type server_id: str
+ :param parameters: Body of Registered Server object. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.RegisteredServerUpdateParameters
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either RegisteredServer or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ storage_sync_service_name: str,
+ server_id: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.RegisteredServer]:
+ """Update registered server.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
- :type parameters: ~azure.mgmt.storagesync.models.RegisteredServerCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
+ :type resource_group_name: str
+ :param storage_sync_service_name: Name of Storage Sync Service resource. Required.
+ :type storage_sync_service_name: str
+ :param server_id: GUID identifying the on-premises server. Required.
+ :type server_id: str
+ :param parameters: Body of Registered Server object. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either RegisteredServer or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ storage_sync_service_name: str,
+ server_id: str,
+ parameters: Union[_models.RegisteredServerUpdateParameters, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.RegisteredServer]:
+ """Update registered server.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param storage_sync_service_name: Name of Storage Sync Service resource. Required.
+ :type storage_sync_service_name: str
+ :param server_id: GUID identifying the on-premises server. Required.
+ :type server_id: str
+ :param parameters: Body of Registered Server object. Is either a
+ RegisteredServerUpdateParameters type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.RegisteredServerUpdateParameters or IO[bytes]
:return: An instance of AsyncLROPoller that returns either RegisteredServer or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
@@ -437,16 +626,14 @@ async def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.RegisteredServer]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.RegisteredServer] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._create_initial( # type: ignore
+ raw_result = await self._update_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -458,6 +645,7 @@ async def begin_create(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -468,32 +656,32 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("RegisteredServer", pipeline_response)
+ deserialized = self._deserialize("RegisteredServer", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.RegisteredServer].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return AsyncLROPoller[_models.RegisteredServer](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, storage_sync_service_name: str, server_id: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -504,31 +692,33 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -547,10 +737,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -565,14 +757,6 @@ async def begin_delete(
:type storage_sync_service_name: str
:param server_id: GUID identifying the on-premises server. Required.
:type server_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -580,15 +764,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -598,38 +780,37 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
-
- async def _trigger_rollover_initial( # pylint: disable=inconsistent-return-statements
+ async def _trigger_rollover_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: Union[_models.TriggerRolloverRequest, IO],
+ parameters: Union[_models.TriggerRolloverRequest, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -640,21 +821,19 @@ async def _trigger_rollover_initial( # pylint: disable=inconsistent-return-stat
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "TriggerRolloverRequest")
- request = build_trigger_rollover_request(
+ _request = build_trigger_rollover_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -663,20 +842,24 @@ async def _trigger_rollover_initial( # pylint: disable=inconsistent-return-stat
content_type=content_type,
json=_json,
content=_content,
- template_url=self._trigger_rollover_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -695,10 +878,12 @@ async def _trigger_rollover_initial( # pylint: disable=inconsistent-return-stat
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _trigger_rollover_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}/triggerRollover"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_trigger_rollover(
@@ -725,14 +910,6 @@ async def begin_trigger_rollover(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -744,7 +921,7 @@ async def begin_trigger_rollover(
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -759,18 +936,10 @@ async def begin_trigger_rollover(
:param server_id: Server Id. Required.
:type server_id: str
:param parameters: Body of Trigger Rollover request. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -782,7 +951,7 @@ async def begin_trigger_rollover(
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: Union[_models.TriggerRolloverRequest, IO],
+ parameters: Union[_models.TriggerRolloverRequest, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Triggers Server certificate rollover.
@@ -794,20 +963,9 @@ async def begin_trigger_rollover(
:type storage_sync_service_name: str
:param server_id: Server Id. Required.
:type server_id: str
- :param parameters: Body of Trigger Rollover request. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.TriggerRolloverRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Trigger Rollover request. Is either a TriggerRolloverRequest type or
+ a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.TriggerRolloverRequest or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -815,16 +973,14 @@ async def begin_trigger_rollover(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._trigger_rollover_initial( # type: ignore
+ raw_result = await self._trigger_rollover_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -836,25 +992,24 @@ async def begin_trigger_rollover(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_trigger_rollover.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}/triggerRollover"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_server_endpoints_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_server_endpoints_operations.py
index b97397efeaea..e2229470bfc9 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_server_endpoints_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_server_endpoints_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +18,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +32,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._server_endpoints_operations import (
build_create_request,
build_delete_request,
@@ -39,12 +40,11 @@
build_recall_action_request,
build_update_request,
)
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -74,10 +74,10 @@ async def _create_initial(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Union[_models.ServerEndpointCreateParameters, IO],
+ parameters: Union[_models.ServerEndpointCreateParameters, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.ServerEndpoint]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -88,21 +88,19 @@ async def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ServerEndpoint]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ServerEndpointCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -112,25 +110,28 @@ async def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -138,8 +139,6 @@ async def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -150,12 +149,12 @@ async def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_create(
@@ -185,14 +184,6 @@ async def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -206,7 +197,7 @@ async def begin_create(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -223,18 +214,10 @@ async def begin_create(
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
:param parameters: Body of Server Endpoint object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -248,7 +231,7 @@ async def begin_create(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Union[_models.ServerEndpointCreateParameters, IO],
+ parameters: Union[_models.ServerEndpointCreateParameters, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.ServerEndpoint]:
"""Create a new ServerEndpoint.
@@ -262,20 +245,9 @@ async def begin_create(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :param parameters: Body of Server Endpoint object. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.ServerEndpointCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Server Endpoint object. Is either a ServerEndpointCreateParameters
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.ServerEndpointCreateParameters or IO[bytes]
:return: An instance of AsyncLROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -284,16 +256,14 @@ async def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.ServerEndpoint]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.ServerEndpoint] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._create_initial( # type: ignore
+ raw_result = await self._create_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -306,6 +276,7 @@ async def begin_create(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -316,27 +287,27 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
+ deserialized = self._deserialize("ServerEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.ServerEndpoint].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return AsyncLROPoller[_models.ServerEndpoint](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
async def _update_initial(
self,
@@ -344,10 +315,10 @@ async def _update_initial(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Optional[Union[_models.ServerEndpointUpdateParameters, IO]] = None,
+ parameters: Optional[Union[_models.ServerEndpointUpdateParameters, IO[bytes]]] = None,
**kwargs: Any
- ) -> Optional[_models.ServerEndpoint]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -358,16 +329,14 @@ async def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ServerEndpoint]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
if parameters is not None:
@@ -375,7 +344,7 @@ async def _update_initial(
else:
_json = None
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -385,25 +354,28 @@ async def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -411,8 +383,6 @@ async def _update_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -423,12 +393,12 @@ async def _update_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_update(
@@ -458,14 +428,6 @@ async def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -479,7 +441,7 @@ async def begin_update(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Optional[IO] = None,
+ parameters: Optional[IO[bytes]] = None,
*,
content_type: str = "application/json",
**kwargs: Any
@@ -496,18 +458,10 @@ async def begin_update(
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
:param parameters: Any of the properties applicable in PUT request. Default value is None.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -521,7 +475,7 @@ async def begin_update(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Optional[Union[_models.ServerEndpointUpdateParameters, IO]] = None,
+ parameters: Optional[Union[_models.ServerEndpointUpdateParameters, IO[bytes]]] = None,
**kwargs: Any
) -> AsyncLROPoller[_models.ServerEndpoint]:
"""Patch a given ServerEndpoint.
@@ -535,20 +489,9 @@ async def begin_update(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :param parameters: Any of the properties applicable in PUT request. Is either a model type or a
- IO type. Default value is None.
- :type parameters: ~azure.mgmt.storagesync.models.ServerEndpointUpdateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Any of the properties applicable in PUT request. Is either a
+ ServerEndpointUpdateParameters type or a IO[bytes] type. Default value is None.
+ :type parameters: ~azure.mgmt.storagesync.models.ServerEndpointUpdateParameters or IO[bytes]
:return: An instance of AsyncLROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -557,16 +500,14 @@ async def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.ServerEndpoint]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.ServerEndpoint] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._update_initial( # type: ignore
+ raw_result = await self._update_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -579,6 +520,7 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -589,27 +531,27 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
+ deserialized = self._deserialize("ServerEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.ServerEndpoint].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return AsyncLROPoller[_models.ServerEndpoint](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace_async
async def get(
@@ -631,12 +573,11 @@ async def get(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ServerEndpoint or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.ServerEndpoint
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -647,27 +588,24 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.ServerEndpoint]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.ServerEndpoint] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
server_endpoint_name=server_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -683,24 +621,22 @@ async def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
+ deserialized = self._deserialize("ServerEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
+ return deserialized # type: ignore
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
-
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -711,32 +647,34 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
server_endpoint_name=server_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -755,10 +693,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -780,14 +720,6 @@ async def begin_delete(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -795,15 +727,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -814,28 +744,27 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_by_sync_group(
@@ -850,7 +779,6 @@ def list_by_sync_group(
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ServerEndpoint or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.ServerEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -858,12 +786,10 @@ def list_by_sync_group(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.ServerEndpointArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.ServerEndpointArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -874,18 +800,16 @@ def list_by_sync_group(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_sync_group_request(
+ _request = build_list_by_sync_group_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_sync_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -897,26 +821,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ServerEndpointArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -929,18 +853,16 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_sync_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints"} # type: ignore
-
- async def _recall_action_initial( # pylint: disable=inconsistent-return-statements
+ async def _recall_action_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Union[_models.RecallActionParameters, IO],
+ parameters: Union[_models.RecallActionParameters, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -951,21 +873,19 @@ async def _recall_action_initial( # pylint: disable=inconsistent-return-stateme
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "RecallActionParameters")
- request = build_recall_action_request(
+ _request = build_recall_action_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -975,20 +895,24 @@ async def _recall_action_initial( # pylint: disable=inconsistent-return-stateme
content_type=content_type,
json=_json,
content=_content,
- template_url=self._recall_action_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -1007,10 +931,12 @@ async def _recall_action_initial( # pylint: disable=inconsistent-return-stateme
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _recall_action_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}/recallAction"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_recall_action(
@@ -1040,14 +966,6 @@ async def begin_recall_action(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1060,7 +978,7 @@ async def begin_recall_action(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1077,18 +995,10 @@ async def begin_recall_action(
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
:param parameters: Body of Recall Action object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1101,7 +1011,7 @@ async def begin_recall_action(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Union[_models.RecallActionParameters, IO],
+ parameters: Union[_models.RecallActionParameters, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Recall a server endpoint.
@@ -1115,19 +1025,9 @@ async def begin_recall_action(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :param parameters: Body of Recall Action object. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.RecallActionParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Recall Action object. Is either a RecallActionParameters type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.RecallActionParameters or IO[bytes]
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1135,16 +1035,14 @@ async def begin_recall_action(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._recall_action_initial( # type: ignore
+ raw_result = await self._recall_action_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1157,25 +1055,24 @@ async def begin_recall_action(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_recall_action.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}/recallAction"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_storage_sync_services_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_storage_sync_services_operations.py
index 9288d75de90e..f7395d21ad55 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_storage_sync_services_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_storage_sync_services_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._storage_sync_services_operations import (
build_check_name_availability_request,
build_create_request,
@@ -40,12 +40,11 @@
build_list_by_subscription_request,
build_update_request,
)
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -87,7 +86,6 @@ async def check_name_availability(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
@@ -95,18 +93,17 @@ async def check_name_availability(
@overload
async def check_name_availability(
- self, location_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
+ self, location_name: str, parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> _models.CheckNameAvailabilityResult:
"""Check the give namespace name availability.
:param location_name: The desired region for the name check. Required.
:type location_name: str
:param parameters: Parameters to check availability of the given namespace name. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
@@ -114,24 +111,20 @@ async def check_name_availability(
@distributed_trace_async
async def check_name_availability(
- self, location_name: str, parameters: Union[_models.CheckNameAvailabilityParameters, IO], **kwargs: Any
+ self, location_name: str, parameters: Union[_models.CheckNameAvailabilityParameters, IO[bytes]], **kwargs: Any
) -> _models.CheckNameAvailabilityResult:
"""Check the give namespace name availability.
:param location_name: The desired region for the name check. Required.
:type location_name: str
:param parameters: Parameters to check availability of the given namespace name. Is either a
- model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.CheckNameAvailabilityParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ CheckNameAvailabilityParameters type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.CheckNameAvailabilityParameters or IO[bytes]
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -142,61 +135,57 @@ async def check_name_availability(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CheckNameAvailabilityResult]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.CheckNameAvailabilityResult] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "CheckNameAvailabilityParameters")
- request = build_check_name_availability_request(
+ _request = build_check_name_availability_request(
location_name=location_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
- template_url=self.check_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("CheckNameAvailabilityResult", pipeline_response)
+ deserialized = self._deserialize("CheckNameAvailabilityResult", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- check_name_availability.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StorageSync/locations/{locationName}/checkNameAvailability"} # type: ignore
+ return deserialized # type: ignore
async def _create_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Union[_models.StorageSyncServiceCreateParameters, IO],
+ parameters: Union[_models.StorageSyncServiceCreateParameters, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.StorageSyncService]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -207,21 +196,19 @@ async def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.StorageSyncService]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "StorageSyncServiceCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
@@ -229,29 +216,29 @@ async def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -263,12 +250,12 @@ async def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_create(
@@ -292,14 +279,6 @@ async def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -311,7 +290,7 @@ async def begin_create(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -324,18 +303,10 @@ async def begin_create(
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param parameters: Storage Sync Service resource name. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -347,7 +318,7 @@ async def begin_create(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Union[_models.StorageSyncServiceCreateParameters, IO],
+ parameters: Union[_models.StorageSyncServiceCreateParameters, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.StorageSyncService]:
"""Create a new StorageSyncService.
@@ -357,20 +328,10 @@ async def begin_create(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :param parameters: Storage Sync Service resource name. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.StorageSyncServiceCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Storage Sync Service resource name. Is either a
+ StorageSyncServiceCreateParameters type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.StorageSyncServiceCreateParameters or
+ IO[bytes]
:return: An instance of AsyncLROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -379,16 +340,14 @@ async def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncService]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.StorageSyncService] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._create_initial( # type: ignore
+ raw_result = await self._create_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
parameters=parameters,
@@ -399,30 +358,31 @@ async def begin_create(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
+ deserialized = self._deserialize("StorageSyncService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.StorageSyncService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return AsyncLROPoller[_models.StorageSyncService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace_async
async def get(
@@ -435,12 +395,11 @@ async def get(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageSyncService or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.StorageSyncService
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -451,25 +410,22 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncService]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.StorageSyncService] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -485,23 +441,21 @@ async def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
+ deserialized = self._deserialize("StorageSyncService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return deserialized # type: ignore
async def _update_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Optional[Union[_models.StorageSyncServiceUpdateParameters, IO]] = None,
+ parameters: Optional[Union[_models.StorageSyncServiceUpdateParameters, IO[bytes]]] = None,
**kwargs: Any
- ) -> Optional[_models.StorageSyncService]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -512,16 +466,14 @@ async def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.StorageSyncService]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
if parameters is not None:
@@ -529,7 +481,7 @@ async def _update_initial(
else:
_json = None
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
@@ -537,25 +489,28 @@ async def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -563,8 +518,6 @@ async def _update_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -576,12 +529,12 @@ async def _update_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return deserialized # type: ignore
@overload
async def begin_update(
@@ -605,14 +558,6 @@ async def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -624,7 +569,7 @@ async def begin_update(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Optional[IO] = None,
+ parameters: Optional[IO[bytes]] = None,
*,
content_type: str = "application/json",
**kwargs: Any
@@ -637,18 +582,10 @@ async def begin_update(
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param parameters: Storage Sync Service resource. Default value is None.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -660,7 +597,7 @@ async def begin_update(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Optional[Union[_models.StorageSyncServiceUpdateParameters, IO]] = None,
+ parameters: Optional[Union[_models.StorageSyncServiceUpdateParameters, IO[bytes]]] = None,
**kwargs: Any
) -> AsyncLROPoller[_models.StorageSyncService]:
"""Patch a given StorageSyncService.
@@ -670,20 +607,10 @@ async def begin_update(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :param parameters: Storage Sync Service resource. Is either a model type or a IO type. Default
- value is None.
- :type parameters: ~azure.mgmt.storagesync.models.StorageSyncServiceUpdateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Storage Sync Service resource. Is either a
+ StorageSyncServiceUpdateParameters type or an IO[bytes] type. Default value is None.
+ :type parameters: ~azure.mgmt.storagesync.models.StorageSyncServiceUpdateParameters or
+ IO[bytes]
:return: An instance of AsyncLROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -692,16 +619,14 @@ async def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncService]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.StorageSyncService] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._update_initial( # type: ignore
+ raw_result = await self._update_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
parameters=parameters,
@@ -712,6 +637,7 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -722,32 +648,32 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
+ deserialized = self._deserialize("StorageSyncService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.StorageSyncService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return AsyncLROPoller[_models.StorageSyncService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, storage_sync_service_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -758,30 +684,32 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -804,10 +732,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -820,14 +750,6 @@ async def begin_delete(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -835,15 +757,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
api_version=api_version,
@@ -852,28 +772,27 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod
+ polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_by_resource_group(
@@ -884,7 +803,6 @@ def list_by_resource_group(
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageSyncService or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -893,12 +811,10 @@ def list_by_resource_group(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncServiceArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.StorageSyncServiceArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -909,16 +825,14 @@ def list_by_resource_group(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -930,26 +844,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("StorageSyncServiceArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -962,13 +876,10 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices"} # type: ignore
-
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.StorageSyncService"]:
"""Get a StorageSyncService list by subscription.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageSyncService or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -977,12 +888,10 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.StorageS
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncServiceArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.StorageSyncServiceArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -993,15 +902,13 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.StorageS
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1013,26 +920,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("StorageSyncServiceArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1044,5 +951,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StorageSync/storageSyncServices"} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_sync_groups_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_sync_groups_operations.py
index 2741c4647fbf..bdec7cdd30b4 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_sync_groups_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_sync_groups_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,27 +20,24 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._sync_groups_operations import (
build_create_request,
build_delete_request,
build_get_request,
build_list_by_storage_sync_service_request,
)
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -75,7 +72,6 @@ def list_by_storage_sync_service(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SyncGroup or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.SyncGroup]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -83,12 +79,10 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.SyncGroupArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.SyncGroupArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -99,17 +93,15 @@ def list_by_storage_sync_service(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -121,26 +113,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("SyncGroupArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -153,8 +145,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups"} # type: ignore
-
@overload
async def create(
self,
@@ -180,7 +170,6 @@ async def create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: SyncGroup or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.SyncGroup
:raises ~azure.core.exceptions.HttpResponseError:
@@ -192,7 +181,7 @@ async def create(
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -207,11 +196,10 @@ async def create(
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param parameters: Sync Group Body. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: SyncGroup or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.SyncGroup
:raises ~azure.core.exceptions.HttpResponseError:
@@ -223,7 +211,7 @@ async def create(
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
- parameters: Union[_models.SyncGroupCreateParameters, IO],
+ parameters: Union[_models.SyncGroupCreateParameters, IO[bytes]],
**kwargs: Any
) -> _models.SyncGroup:
"""Create a new SyncGroup.
@@ -235,17 +223,14 @@ async def create(
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :param parameters: Sync Group Body. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.SyncGroupCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Sync Group Body. Is either a SyncGroupCreateParameters type or an IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.SyncGroupCreateParameters or IO[bytes]
:return: SyncGroup or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.SyncGroup
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -256,21 +241,19 @@ async def create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.SyncGroup]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.SyncGroup] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "SyncGroupCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -279,15 +262,14 @@ async def create(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -303,14 +285,12 @@ async def create(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("SyncGroup", pipeline_response)
+ deserialized = self._deserialize("SyncGroup", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace_async
async def get(
@@ -325,12 +305,11 @@ async def get(
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: SyncGroup or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.SyncGroup
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -341,26 +320,23 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.SyncGroup]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.SyncGroup] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -376,17 +352,15 @@ async def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("SyncGroup", pipeline_response)
+ deserialized = self._deserialize("SyncGroup", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace_async
- async def delete( # pylint: disable=inconsistent-return-statements
+ async def delete(
self, resource_group_name: str, storage_sync_service_name: str, sync_group_name: str, **kwargs: Any
) -> None:
"""Delete a given SyncGroup.
@@ -398,12 +372,11 @@ async def delete( # pylint: disable=inconsistent-return-statements
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -414,26 +387,23 @@ async def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -451,6 +421,4 @@ async def delete( # pylint: disable=inconsistent-return-statements
)
if cls:
- return cls(pipeline_response, None, response_headers)
-
- delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}"} # type: ignore
+ return cls(pipeline_response, None, response_headers) # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_workflows_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_workflows_operations.py
index 649202af349d..af28d5ec27f2 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_workflows_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/aio/operations/_workflows_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,26 +19,23 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._workflows_operations import (
build_abort_request,
build_get_request,
build_list_by_storage_sync_service_request,
)
-from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -74,7 +70,6 @@ def list_by_storage_sync_service(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Workflow or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagesync.models.Workflow]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -82,12 +77,10 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkflowArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.WorkflowArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -98,17 +91,15 @@ def list_by_storage_sync_service(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -120,26 +111,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("WorkflowArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -152,8 +143,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows"} # type: ignore
-
@distributed_trace_async
async def get(
self, resource_group_name: str, storage_sync_service_name: str, workflow_id: str, **kwargs: Any
@@ -167,12 +156,11 @@ async def get(
:type storage_sync_service_name: str
:param workflow_id: workflow Id. Required.
:type workflow_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Workflow or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.Workflow
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -183,26 +171,23 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.Workflow]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.Workflow] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
workflow_id=workflow_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -218,17 +203,15 @@ async def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("Workflow", pipeline_response)
+ deserialized = self._deserialize("Workflow", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows/{workflowId}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace_async
- async def abort( # pylint: disable=inconsistent-return-statements
+ async def abort(
self, resource_group_name: str, storage_sync_service_name: str, workflow_id: str, **kwargs: Any
) -> None:
"""Abort the given workflow.
@@ -240,12 +223,11 @@ async def abort( # pylint: disable=inconsistent-return-statements
:type storage_sync_service_name: str
:param workflow_id: workflow Id. Required.
:type workflow_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -256,26 +238,23 @@ async def abort( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_abort_request(
+ _request = build_abort_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
workflow_id=workflow_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.abort.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -292,6 +271,4 @@ async def abort( # pylint: disable=inconsistent-return-statements
)
if cls:
- return cls(pipeline_response, None, response_headers)
-
- abort.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows/{workflowId}/abort"} # type: ignore
+ return cls(pipeline_response, None, response_headers) # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/__init__.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/__init__.py
index eb25b6dceef9..96c62bcbb6d7 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/__init__.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/__init__.py
@@ -5,106 +5,128 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._models_py3 import BackupRequest
-from ._models_py3 import CheckNameAvailabilityParameters
-from ._models_py3 import CheckNameAvailabilityResult
-from ._models_py3 import CloudEndpoint
-from ._models_py3 import CloudEndpointAfsShareMetadataCertificatePublicKeys
-from ._models_py3 import CloudEndpointArray
-from ._models_py3 import CloudEndpointChangeEnumerationActivity
-from ._models_py3 import CloudEndpointChangeEnumerationStatus
-from ._models_py3 import CloudEndpointCreateParameters
-from ._models_py3 import CloudEndpointLastChangeEnumerationStatus
-from ._models_py3 import CloudTieringCachePerformance
-from ._models_py3 import CloudTieringDatePolicyStatus
-from ._models_py3 import CloudTieringFilesNotTiering
-from ._models_py3 import CloudTieringLowDiskMode
-from ._models_py3 import CloudTieringSpaceSavings
-from ._models_py3 import CloudTieringVolumeFreeSpacePolicyStatus
-from ._models_py3 import FilesNotTieringError
-from ._models_py3 import LocationOperationStatus
-from ._models_py3 import OperationDisplayInfo
-from ._models_py3 import OperationDisplayResource
-from ._models_py3 import OperationEntity
-from ._models_py3 import OperationEntityListResult
-from ._models_py3 import OperationProperties
-from ._models_py3 import OperationResourceMetricSpecification
-from ._models_py3 import OperationResourceMetricSpecificationDimension
-from ._models_py3 import OperationResourceServiceSpecification
-from ._models_py3 import OperationStatus
-from ._models_py3 import PostBackupResponse
-from ._models_py3 import PostRestoreRequest
-from ._models_py3 import PreRestoreRequest
-from ._models_py3 import PrivateEndpoint
-from ._models_py3 import PrivateEndpointConnection
-from ._models_py3 import PrivateEndpointConnectionListResult
-from ._models_py3 import PrivateLinkResource
-from ._models_py3 import PrivateLinkResourceListResult
-from ._models_py3 import PrivateLinkServiceConnectionState
-from ._models_py3 import ProxyResource
-from ._models_py3 import RecallActionParameters
-from ._models_py3 import RegisteredServer
-from ._models_py3 import RegisteredServerArray
-from ._models_py3 import RegisteredServerCreateParameters
-from ._models_py3 import Resource
-from ._models_py3 import ResourcesMoveInfo
-from ._models_py3 import RestoreFileSpec
-from ._models_py3 import ServerEndpoint
-from ._models_py3 import ServerEndpointArray
-from ._models_py3 import ServerEndpointBackgroundDataDownloadActivity
-from ._models_py3 import ServerEndpointCloudTieringStatus
-from ._models_py3 import ServerEndpointCreateParameters
-from ._models_py3 import ServerEndpointFilesNotSyncingError
-from ._models_py3 import ServerEndpointRecallError
-from ._models_py3 import ServerEndpointRecallStatus
-from ._models_py3 import ServerEndpointSyncActivityStatus
-from ._models_py3 import ServerEndpointSyncSessionStatus
-from ._models_py3 import ServerEndpointSyncStatus
-from ._models_py3 import ServerEndpointUpdateParameters
-from ._models_py3 import StorageSyncApiError
-from ._models_py3 import StorageSyncError
-from ._models_py3 import StorageSyncErrorDetails
-from ._models_py3 import StorageSyncInnerErrorDetails
-from ._models_py3 import StorageSyncService
-from ._models_py3 import StorageSyncServiceArray
-from ._models_py3 import StorageSyncServiceCreateParameters
-from ._models_py3 import StorageSyncServiceUpdateParameters
-from ._models_py3 import SubscriptionState
-from ._models_py3 import SyncGroup
-from ._models_py3 import SyncGroupArray
-from ._models_py3 import SyncGroupCreateParameters
-from ._models_py3 import SystemData
-from ._models_py3 import TrackedResource
-from ._models_py3 import TriggerChangeDetectionParameters
-from ._models_py3 import TriggerRolloverRequest
-from ._models_py3 import Workflow
-from ._models_py3 import WorkflowArray
+from typing import TYPE_CHECKING
-from ._microsoft_storage_sync_enums import ChangeDetectionMode
-from ._microsoft_storage_sync_enums import CloudEndpointChangeEnumerationActivityState
-from ._microsoft_storage_sync_enums import CloudEndpointChangeEnumerationTotalCountsState
-from ._microsoft_storage_sync_enums import CloudTieringLowDiskModeState
-from ._microsoft_storage_sync_enums import CreatedByType
-from ._microsoft_storage_sync_enums import FeatureStatus
-from ._microsoft_storage_sync_enums import IncomingTrafficPolicy
-from ._microsoft_storage_sync_enums import InitialDownloadPolicy
-from ._microsoft_storage_sync_enums import InitialUploadPolicy
-from ._microsoft_storage_sync_enums import LocalCacheMode
-from ._microsoft_storage_sync_enums import NameAvailabilityReason
-from ._microsoft_storage_sync_enums import OperationDirection
-from ._microsoft_storage_sync_enums import PrivateEndpointConnectionProvisioningState
-from ._microsoft_storage_sync_enums import PrivateEndpointServiceConnectionStatus
-from ._microsoft_storage_sync_enums import ProgressType
-from ._microsoft_storage_sync_enums import Reason
-from ._microsoft_storage_sync_enums import RegisteredServerAgentVersionStatus
-from ._microsoft_storage_sync_enums import ServerEndpointHealthState
-from ._microsoft_storage_sync_enums import ServerEndpointOfflineDataTransferState
-from ._microsoft_storage_sync_enums import ServerEndpointSyncActivityState
-from ._microsoft_storage_sync_enums import ServerEndpointSyncMode
-from ._microsoft_storage_sync_enums import WorkflowStatus
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models_py3 import ( # type: ignore
+ BackupRequest,
+ CheckNameAvailabilityParameters,
+ CheckNameAvailabilityResult,
+ CloudEndpoint,
+ CloudEndpointAfsShareMetadataCertificatePublicKeys,
+ CloudEndpointArray,
+ CloudEndpointChangeEnumerationActivity,
+ CloudEndpointChangeEnumerationStatus,
+ CloudEndpointCreateParameters,
+ CloudEndpointLastChangeEnumerationStatus,
+ CloudTieringCachePerformance,
+ CloudTieringDatePolicyStatus,
+ CloudTieringFilesNotTiering,
+ CloudTieringLowDiskMode,
+ CloudTieringSpaceSavings,
+ CloudTieringVolumeFreeSpacePolicyStatus,
+ ErrorAdditionalInfo,
+ ErrorDetail,
+ ErrorResponse,
+ FilesNotTieringError,
+ LocationOperationStatus,
+ ManagedServiceIdentity,
+ OperationDisplayInfo,
+ OperationDisplayResource,
+ OperationEntity,
+ OperationEntityListResult,
+ OperationProperties,
+ OperationResourceMetricSpecification,
+ OperationResourceMetricSpecificationDimension,
+ OperationResourceServiceSpecification,
+ OperationStatus,
+ PostBackupResponse,
+ PostRestoreRequest,
+ PreRestoreRequest,
+ PrivateEndpoint,
+ PrivateEndpointConnection,
+ PrivateEndpointConnectionListResult,
+ PrivateLinkResource,
+ PrivateLinkResourceListResult,
+ PrivateLinkServiceConnectionState,
+ ProxyResource,
+ RecallActionParameters,
+ RegisteredServer,
+ RegisteredServerArray,
+ RegisteredServerCreateParameters,
+ RegisteredServerUpdateParameters,
+ Resource,
+ ResourcesMoveInfo,
+ RestoreFileSpec,
+ ServerEndpoint,
+ ServerEndpointArray,
+ ServerEndpointBackgroundDataDownloadActivity,
+ ServerEndpointCloudTieringStatus,
+ ServerEndpointCreateParameters,
+ ServerEndpointFilesNotSyncingError,
+ ServerEndpointProvisioningStatus,
+ ServerEndpointProvisioningStepStatus,
+ ServerEndpointRecallError,
+ ServerEndpointRecallStatus,
+ ServerEndpointSyncActivityStatus,
+ ServerEndpointSyncSessionStatus,
+ ServerEndpointSyncStatus,
+ ServerEndpointUpdateParameters,
+ StorageSyncApiError,
+ StorageSyncError,
+ StorageSyncErrorDetails,
+ StorageSyncInnerErrorDetails,
+ StorageSyncService,
+ StorageSyncServiceArray,
+ StorageSyncServiceCreateParameters,
+ StorageSyncServiceUpdateParameters,
+ SubscriptionState,
+ SyncGroup,
+ SyncGroupArray,
+ SyncGroupCreateParameters,
+ SystemData,
+ TrackedResource,
+ TriggerChangeDetectionParameters,
+ TriggerRolloverRequest,
+ UserAssignedIdentity,
+ Workflow,
+ WorkflowArray,
+)
+
+from ._microsoft_storage_sync_enums import ( # type: ignore
+ ChangeDetectionMode,
+ CloudEndpointChangeEnumerationActivityState,
+ CloudEndpointChangeEnumerationTotalCountsState,
+ CloudTieringLowDiskModeState,
+ CreatedByType,
+ FeatureStatus,
+ IncomingTrafficPolicy,
+ InitialDownloadPolicy,
+ InitialUploadPolicy,
+ LocalCacheMode,
+ ManagedServiceIdentityType,
+ NameAvailabilityReason,
+ OperationDirection,
+ PrivateEndpointConnectionProvisioningState,
+ PrivateEndpointServiceConnectionStatus,
+ ProgressType,
+ Reason,
+ RegisteredServerAgentVersionStatus,
+ ServerAuthType,
+ ServerEndpointHealthState,
+ ServerEndpointOfflineDataTransferState,
+ ServerEndpointSyncActivityState,
+ ServerEndpointSyncMode,
+ ServerProvisioningStatus,
+ WorkflowStatus,
+)
from ._patch import __all__ as _patch_all
-from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -124,8 +146,12 @@
"CloudTieringLowDiskMode",
"CloudTieringSpaceSavings",
"CloudTieringVolumeFreeSpacePolicyStatus",
+ "ErrorAdditionalInfo",
+ "ErrorDetail",
+ "ErrorResponse",
"FilesNotTieringError",
"LocationOperationStatus",
+ "ManagedServiceIdentity",
"OperationDisplayInfo",
"OperationDisplayResource",
"OperationEntity",
@@ -149,6 +175,7 @@
"RegisteredServer",
"RegisteredServerArray",
"RegisteredServerCreateParameters",
+ "RegisteredServerUpdateParameters",
"Resource",
"ResourcesMoveInfo",
"RestoreFileSpec",
@@ -158,6 +185,8 @@
"ServerEndpointCloudTieringStatus",
"ServerEndpointCreateParameters",
"ServerEndpointFilesNotSyncingError",
+ "ServerEndpointProvisioningStatus",
+ "ServerEndpointProvisioningStepStatus",
"ServerEndpointRecallError",
"ServerEndpointRecallStatus",
"ServerEndpointSyncActivityStatus",
@@ -180,6 +209,7 @@
"TrackedResource",
"TriggerChangeDetectionParameters",
"TriggerRolloverRequest",
+ "UserAssignedIdentity",
"Workflow",
"WorkflowArray",
"ChangeDetectionMode",
@@ -192,6 +222,7 @@
"InitialDownloadPolicy",
"InitialUploadPolicy",
"LocalCacheMode",
+ "ManagedServiceIdentityType",
"NameAvailabilityReason",
"OperationDirection",
"PrivateEndpointConnectionProvisioningState",
@@ -199,11 +230,13 @@
"ProgressType",
"Reason",
"RegisteredServerAgentVersionStatus",
+ "ServerAuthType",
"ServerEndpointHealthState",
"ServerEndpointOfflineDataTransferState",
"ServerEndpointSyncActivityState",
"ServerEndpointSyncMode",
+ "ServerProvisioningStatus",
"WorkflowStatus",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/_microsoft_storage_sync_enums.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/_microsoft_storage_sync_enums.py
index 57202ed8de90..41d379721fbd 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/_microsoft_storage_sync_enums.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/_microsoft_storage_sync_enums.py
@@ -85,6 +85,17 @@ class LocalCacheMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
UPDATE_LOCALLY_CACHED_FILES = "UpdateLocallyCachedFiles"
+class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of managed service identity (where both SystemAssigned and UserAssigned types are
+ allowed).
+ """
+
+ NONE = "None"
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+
+
class NameAvailabilityReason(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Gets the reason that a Storage Sync Service name could not be used. The Reason element is only
returned if NameAvailable is false.
@@ -148,6 +159,13 @@ class RegisteredServerAgentVersionStatus(str, Enum, metaclass=CaseInsensitiveEnu
BLOCKED = "Blocked"
+class ServerAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Type of the Server Auth type."""
+
+ CERTIFICATE = "Certificate"
+ MANAGED_IDENTITY = "ManagedIdentity"
+
+
class ServerEndpointHealthState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Type of the server endpoint health state."""
@@ -183,6 +201,16 @@ class ServerEndpointSyncMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
INITIAL_FULL_DOWNLOAD = "InitialFullDownload"
+class ServerProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Server provisioning status."""
+
+ NOT_STARTED = "NotStarted"
+ IN_PROGRESS = "InProgress"
+ READY_SYNC_NOT_FUNCTIONAL = "Ready_SyncNotFunctional"
+ READY_SYNC_FUNCTIONAL = "Ready_SyncFunctional"
+ ERROR = "Error"
+
+
class WorkflowStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Type of the Workflow Status."""
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/_models_py3.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/_models_py3.py
index bdb6fdc0a436..be09338217c6 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/_models_py3.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/models/_models_py3.py
@@ -1,5 +1,5 @@
-# coding=utf-8
# pylint: disable=too-many-lines
+# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
@@ -16,10 +16,9 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
@@ -35,7 +34,7 @@ class BackupRequest(_serialization.Model):
"azure_file_share": {"key": "azureFileShare", "type": "str"},
}
- def __init__(self, *, azure_file_share: Optional[str] = None, **kwargs):
+ def __init__(self, *, azure_file_share: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword azure_file_share: Azure File Share.
:paramtype azure_file_share: str
@@ -49,7 +48,7 @@ class CheckNameAvailabilityParameters(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar name: The name to check for availability. Required.
:vartype name: str
@@ -70,7 +69,7 @@ class CheckNameAvailabilityParameters(_serialization.Model):
type = "Microsoft.StorageSync/storageSyncServices"
- def __init__(self, *, name: str, **kwargs):
+ def __init__(self, *, name: str, **kwargs: Any) -> None:
"""
:keyword name: The name to check for availability. Required.
:paramtype name: str
@@ -108,7 +107,7 @@ class CheckNameAvailabilityResult(_serialization.Model):
"message": {"key": "message", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.name_available = None
@@ -121,8 +120,8 @@ class Resource(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -148,7 +147,7 @@ class Resource(_serialization.Model):
"system_data": {"key": "systemData", "type": "SystemData"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -158,12 +157,13 @@ def __init__(self, **kwargs):
class ProxyResource(Resource):
- """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location.
+ """The resource model definition for a Azure Resource Manager proxy resource. It will not have
+ tags and a location.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -175,32 +175,14 @@ class ProxyResource(Resource):
:vartype system_data: ~azure.mgmt.storagesync.models.SystemData
"""
- _validation = {
- "id": {"readonly": True},
- "name": {"readonly": True},
- "type": {"readonly": True},
- "system_data": {"readonly": True},
- }
-
- _attribute_map = {
- "id": {"key": "id", "type": "str"},
- "name": {"key": "name", "type": "str"},
- "type": {"key": "type", "type": "str"},
- "system_data": {"key": "systemData", "type": "SystemData"},
- }
- def __init__(self, **kwargs):
- """ """
- super().__init__(**kwargs)
-
-
-class CloudEndpoint(ProxyResource): # pylint: disable=too-many-instance-attributes
+class CloudEndpoint(ProxyResource):
"""Cloud Endpoint object.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -273,8 +255,8 @@ def __init__(
provisioning_state: Optional[str] = None,
last_workflow_id: Optional[str] = None,
last_operation_name: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword storage_account_resource_id: Storage Account Resource Id.
:paramtype storage_account_resource_id: str
@@ -306,7 +288,7 @@ def __init__(
self.change_enumeration_status = None
-class CloudEndpointAfsShareMetadataCertificatePublicKeys(_serialization.Model):
+class CloudEndpointAfsShareMetadataCertificatePublicKeys(_serialization.Model): # pylint: disable=name-too-long
"""Cloud endpoint AFS file share metadata signing certificate public keys.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -327,7 +309,7 @@ class CloudEndpointAfsShareMetadataCertificatePublicKeys(_serialization.Model):
"second_key": {"key": "secondKey", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.first_key = None
@@ -345,7 +327,7 @@ class CloudEndpointArray(_serialization.Model):
"value": {"key": "value", "type": "[CloudEndpoint]"},
}
- def __init__(self, *, value: Optional[List["_models.CloudEndpoint"]] = None, **kwargs):
+ def __init__(self, *, value: Optional[List["_models.CloudEndpoint"]] = None, **kwargs: Any) -> None:
"""
:keyword value: Collection of CloudEndpoint.
:paramtype value: list[~azure.mgmt.storagesync.models.CloudEndpoint]
@@ -354,7 +336,7 @@ def __init__(self, *, value: Optional[List["_models.CloudEndpoint"]] = None, **k
self.value = value
-class CloudEndpointChangeEnumerationActivity(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class CloudEndpointChangeEnumerationActivity(_serialization.Model):
"""Cloud endpoint change enumeration activity object.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -425,7 +407,7 @@ class CloudEndpointChangeEnumerationActivity(_serialization.Model): # pylint: d
"deletes_progress_percent": {"key": "deletesProgressPercent", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -469,7 +451,7 @@ class CloudEndpointChangeEnumerationStatus(_serialization.Model):
"activity": {"key": "activity", "type": "CloudEndpointChangeEnumerationActivity"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -482,8 +464,8 @@ class CloudEndpointCreateParameters(ProxyResource):
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -528,8 +510,8 @@ def __init__(
azure_file_share_name: Optional[str] = None,
storage_account_tenant_id: Optional[str] = None,
friendly_name: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword storage_account_resource_id: Storage Account Resource Id.
:paramtype storage_account_resource_id: str
@@ -584,7 +566,7 @@ class CloudEndpointLastChangeEnumerationStatus(_serialization.Model):
"next_run_timestamp": {"key": "nextRunTimestamp", "type": "iso-8601"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.started_timestamp = None
@@ -625,7 +607,7 @@ class CloudTieringCachePerformance(_serialization.Model):
"cache_hit_bytes_percent": {"key": "cacheHitBytesPercent", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -658,7 +640,7 @@ class CloudTieringDatePolicyStatus(_serialization.Model):
},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -690,7 +672,7 @@ class CloudTieringFilesNotTiering(_serialization.Model):
"errors": {"key": "errors", "type": "[FilesNotTieringError]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -719,7 +701,7 @@ class CloudTieringLowDiskMode(_serialization.Model):
"state": {"key": "state", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -763,7 +745,7 @@ class CloudTieringSpaceSavings(_serialization.Model):
"space_savings_bytes": {"key": "spaceSavingsBytes", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -800,7 +782,7 @@ class CloudTieringVolumeFreeSpacePolicyStatus(_serialization.Model):
"current_volume_free_space_percent": {"key": "currentVolumeFreeSpacePercent", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -808,6 +790,98 @@ def __init__(self, **kwargs):
self.current_volume_free_space_percent = None
+class ErrorAdditionalInfo(_serialization.Model):
+ """The resource management error additional info.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The additional info type.
+ :vartype type: str
+ :ivar info: The additional info.
+ :vartype info: JSON
+ """
+
+ _validation = {
+ "type": {"readonly": True},
+ "info": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "type": {"key": "type", "type": "str"},
+ "info": {"key": "info", "type": "object"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.type = None
+ self.info = None
+
+
+class ErrorDetail(_serialization.Model):
+ """The error detail.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: The error code.
+ :vartype code: str
+ :ivar message: The error message.
+ :vartype message: str
+ :ivar target: The error target.
+ :vartype target: str
+ :ivar details: The error details.
+ :vartype details: list[~azure.mgmt.storagesync.models.ErrorDetail]
+ :ivar additional_info: The error additional info.
+ :vartype additional_info: list[~azure.mgmt.storagesync.models.ErrorAdditionalInfo]
+ """
+
+ _validation = {
+ "code": {"readonly": True},
+ "message": {"readonly": True},
+ "target": {"readonly": True},
+ "details": {"readonly": True},
+ "additional_info": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "code": {"key": "code", "type": "str"},
+ "message": {"key": "message", "type": "str"},
+ "target": {"key": "target", "type": "str"},
+ "details": {"key": "details", "type": "[ErrorDetail]"},
+ "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.target = None
+ self.details = None
+ self.additional_info = None
+
+
+class ErrorResponse(_serialization.Model):
+ """Common error response for all Azure Resource Manager APIs to return error details for failed
+ operations. (This also follows the OData error response format.).
+
+ :ivar error: The error object.
+ :vartype error: ~azure.mgmt.storagesync.models.ErrorDetail
+ """
+
+ _attribute_map = {
+ "error": {"key": "error", "type": "ErrorDetail"},
+ }
+
+ def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: Any) -> None:
+ """
+ :keyword error: The error object.
+ :paramtype error: ~azure.mgmt.storagesync.models.ErrorDetail
+ """
+ super().__init__(**kwargs)
+ self.error = error
+
+
class FilesNotTieringError(_serialization.Model):
"""Files not tiering error object.
@@ -829,7 +903,7 @@ class FilesNotTieringError(_serialization.Model):
"file_count": {"key": "fileCount", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.error_code = None
@@ -877,7 +951,7 @@ class LocationOperationStatus(_serialization.Model):
"percent_complete": {"key": "percentComplete", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
@@ -889,6 +963,70 @@ def __init__(self, **kwargs):
self.percent_complete = None
+class ManagedServiceIdentity(_serialization.Model):
+ """Managed service identity (system assigned and/or user assigned identities).
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar principal_id: The service principal ID of the system assigned identity. This property
+ will only be provided for a system assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be
+ provided for a system assigned identity.
+ :vartype tenant_id: str
+ :ivar type: Type of managed service identity (where both SystemAssigned and UserAssigned types
+ are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and
+ "SystemAssigned,UserAssigned".
+ :vartype type: str or ~azure.mgmt.storagesync.models.ManagedServiceIdentityType
+ :ivar user_assigned_identities: The set of user assigned identities associated with the
+ resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form:
+ '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long
+ The dictionary values can be empty objects ({}) in requests.
+ :vartype user_assigned_identities: dict[str,
+ ~azure.mgmt.storagesync.models.UserAssignedIdentity]
+ """
+
+ _validation = {
+ "principal_id": {"readonly": True},
+ "tenant_id": {"readonly": True},
+ "type": {"required": True},
+ }
+
+ _attribute_map = {
+ "principal_id": {"key": "principalId", "type": "str"},
+ "tenant_id": {"key": "tenantId", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"},
+ }
+
+ def __init__(
+ self,
+ *,
+ type: Union[str, "_models.ManagedServiceIdentityType"],
+ user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword type: Type of managed service identity (where both SystemAssigned and UserAssigned
+ types are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and
+ "SystemAssigned,UserAssigned".
+ :paramtype type: str or ~azure.mgmt.storagesync.models.ManagedServiceIdentityType
+ :keyword user_assigned_identities: The set of user assigned identities associated with the
+ resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form:
+ '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long
+ The dictionary values can be empty objects ({}) in requests.
+ :paramtype user_assigned_identities: dict[str,
+ ~azure.mgmt.storagesync.models.UserAssignedIdentity]
+ """
+ super().__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = type
+ self.user_assigned_identities = user_assigned_identities
+
+
class OperationDisplayInfo(_serialization.Model):
"""The operation supported by storage sync.
@@ -916,8 +1054,8 @@ def __init__(
operation: Optional[str] = None,
provider: Optional[str] = None,
resource: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword description: The description of the operation.
:paramtype description: str
@@ -962,8 +1100,8 @@ def __init__(
resource: Optional[str] = None,
operation: Optional[str] = None,
description: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword provider: Operation Display Resource Provider.
:paramtype provider: str
@@ -1008,8 +1146,8 @@ def __init__(
display: Optional["_models.OperationDisplayInfo"] = None,
origin: Optional[str] = None,
properties: Optional["_models.OperationProperties"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Operation name: {provider}/{resource}/{operation}.
:paramtype name: str
@@ -1042,8 +1180,8 @@ class OperationEntityListResult(_serialization.Model):
}
def __init__(
- self, *, next_link: Optional[str] = None, value: Optional[List["_models.OperationEntity"]] = None, **kwargs
- ):
+ self, *, next_link: Optional[str] = None, value: Optional[List["_models.OperationEntity"]] = None, **kwargs: Any
+ ) -> None:
"""
:keyword next_link: The link used to get the next page of operations.
:paramtype next_link: str
@@ -1068,8 +1206,8 @@ class OperationProperties(_serialization.Model):
}
def __init__(
- self, *, service_specification: Optional["_models.OperationResourceServiceSpecification"] = None, **kwargs
- ):
+ self, *, service_specification: Optional["_models.OperationResourceServiceSpecification"] = None, **kwargs: Any
+ ) -> None:
"""
:keyword service_specification: Service specification for the operations resource.
:paramtype service_specification:
@@ -1096,6 +1234,8 @@ class OperationResourceMetricSpecification(_serialization.Model):
:vartype supported_aggregation_types: list[str]
:ivar fill_gap_with_zero: Fill gaps in the metric with zero.
:vartype fill_gap_with_zero: bool
+ :ivar lock_aggregation_type: Lock Aggregation type for the metric.
+ :vartype lock_aggregation_type: str
:ivar dimensions: Dimensions for the metric specification.
:vartype dimensions:
list[~azure.mgmt.storagesync.models.OperationResourceMetricSpecificationDimension]
@@ -1109,6 +1249,7 @@ class OperationResourceMetricSpecification(_serialization.Model):
"aggregation_type": {"key": "aggregationType", "type": "str"},
"supported_aggregation_types": {"key": "supportedAggregationTypes", "type": "[str]"},
"fill_gap_with_zero": {"key": "fillGapWithZero", "type": "bool"},
+ "lock_aggregation_type": {"key": "lockAggregationType", "type": "str"},
"dimensions": {"key": "dimensions", "type": "[OperationResourceMetricSpecificationDimension]"},
}
@@ -1122,9 +1263,10 @@ def __init__(
aggregation_type: Optional[str] = None,
supported_aggregation_types: Optional[List[str]] = None,
fill_gap_with_zero: Optional[bool] = None,
+ lock_aggregation_type: Optional[str] = None,
dimensions: Optional[List["_models.OperationResourceMetricSpecificationDimension"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of the metric.
:paramtype name: str
@@ -1140,6 +1282,8 @@ def __init__(
:paramtype supported_aggregation_types: list[str]
:keyword fill_gap_with_zero: Fill gaps in the metric with zero.
:paramtype fill_gap_with_zero: bool
+ :keyword lock_aggregation_type: Lock Aggregation type for the metric.
+ :paramtype lock_aggregation_type: str
:keyword dimensions: Dimensions for the metric specification.
:paramtype dimensions:
list[~azure.mgmt.storagesync.models.OperationResourceMetricSpecificationDimension]
@@ -1152,10 +1296,11 @@ def __init__(
self.aggregation_type = aggregation_type
self.supported_aggregation_types = supported_aggregation_types
self.fill_gap_with_zero = fill_gap_with_zero
+ self.lock_aggregation_type = lock_aggregation_type
self.dimensions = dimensions
-class OperationResourceMetricSpecificationDimension(_serialization.Model):
+class OperationResourceMetricSpecificationDimension(_serialization.Model): # pylint: disable=name-too-long
"""OperationResourceMetricSpecificationDimension object.
:ivar name: Name of the dimension.
@@ -1178,8 +1323,8 @@ def __init__(
name: Optional[str] = None,
display_name: Optional[str] = None,
to_be_exported_for_shoebox: Optional[bool] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword name: Name of the dimension.
:paramtype name: str
@@ -1207,8 +1352,11 @@ class OperationResourceServiceSpecification(_serialization.Model):
}
def __init__(
- self, *, metric_specifications: Optional[List["_models.OperationResourceMetricSpecification"]] = None, **kwargs
- ):
+ self,
+ *,
+ metric_specifications: Optional[List["_models.OperationResourceMetricSpecification"]] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword metric_specifications: List of metric specifications.
:paramtype metric_specifications:
@@ -1251,7 +1399,7 @@ class OperationStatus(_serialization.Model):
"error": {"key": "error", "type": "StorageSyncApiError"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.name = None
@@ -1278,7 +1426,7 @@ class PostBackupResponse(_serialization.Model):
"cloud_endpoint_name": {"key": "backupMetadata.cloudEndpointName", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.cloud_endpoint_name = None
@@ -1327,8 +1475,8 @@ def __init__(
source_azure_file_share_uri: Optional[str] = None,
failed_file_list: Optional[str] = None,
restore_file_spec: Optional[List["_models.RestoreFileSpec"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword partition: Post Restore partition.
:paramtype partition: str
@@ -1409,8 +1557,8 @@ def __init__(
backup_metadata_property_bag: Optional[str] = None,
restore_file_spec: Optional[List["_models.RestoreFileSpec"]] = None,
pause_wait_for_sync_drain_time_period_in_seconds: Optional[int] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword partition: Pre Restore partition.
:paramtype partition: str
@@ -1445,11 +1593,11 @@ def __init__(
class PrivateEndpoint(_serialization.Model):
- """The Private Endpoint resource.
+ """The private endpoint resource.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: The ARM identifier for Private Endpoint.
+ :ivar id: The ARM identifier for private endpoint.
:vartype id: str
"""
@@ -1461,19 +1609,19 @@ class PrivateEndpoint(_serialization.Model):
"id": {"key": "id", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
class PrivateEndpointConnection(Resource):
- """The Private Endpoint Connection resource.
+ """The private endpoint connection resource.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -1483,7 +1631,9 @@ class PrivateEndpointConnection(Resource):
:ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
information.
:vartype system_data: ~azure.mgmt.storagesync.models.SystemData
- :ivar private_endpoint: The resource of private end point.
+ :ivar group_ids: The group ids for the private endpoint resource.
+ :vartype group_ids: list[str]
+ :ivar private_endpoint: The private endpoint resource.
:vartype private_endpoint: ~azure.mgmt.storagesync.models.PrivateEndpoint
:ivar private_link_service_connection_state: A collection of information about the state of the
connection between service consumer and provider.
@@ -1500,6 +1650,7 @@ class PrivateEndpointConnection(Resource):
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
+ "group_ids": {"readonly": True},
"provisioning_state": {"readonly": True},
}
@@ -1508,6 +1659,7 @@ class PrivateEndpointConnection(Resource):
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
+ "group_ids": {"key": "properties.groupIds", "type": "[str]"},
"private_endpoint": {"key": "properties.privateEndpoint", "type": "PrivateEndpoint"},
"private_link_service_connection_state": {
"key": "properties.privateLinkServiceConnectionState",
@@ -1521,10 +1673,10 @@ def __init__(
*,
private_endpoint: Optional["_models.PrivateEndpoint"] = None,
private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
- :keyword private_endpoint: The resource of private end point.
+ :keyword private_endpoint: The private endpoint resource.
:paramtype private_endpoint: ~azure.mgmt.storagesync.models.PrivateEndpoint
:keyword private_link_service_connection_state: A collection of information about the state of
the connection between service consumer and provider.
@@ -1532,13 +1684,14 @@ def __init__(
~azure.mgmt.storagesync.models.PrivateLinkServiceConnectionState
"""
super().__init__(**kwargs)
+ self.group_ids = None
self.private_endpoint = private_endpoint
self.private_link_service_connection_state = private_link_service_connection_state
self.provisioning_state = None
class PrivateEndpointConnectionListResult(_serialization.Model):
- """List of private endpoint connection associated with the specified storage account.
+ """List of private endpoint connections associated with the specified resource.
:ivar value: Array of private endpoint connections.
:vartype value: list[~azure.mgmt.storagesync.models.PrivateEndpointConnection]
@@ -1548,7 +1701,7 @@ class PrivateEndpointConnectionListResult(_serialization.Model):
"value": {"key": "value", "type": "[PrivateEndpointConnection]"},
}
- def __init__(self, *, value: Optional[List["_models.PrivateEndpointConnection"]] = None, **kwargs):
+ def __init__(self, *, value: Optional[List["_models.PrivateEndpointConnection"]] = None, **kwargs: Any) -> None:
"""
:keyword value: Array of private endpoint connections.
:paramtype value: list[~azure.mgmt.storagesync.models.PrivateEndpointConnection]
@@ -1562,8 +1715,8 @@ class PrivateLinkResource(Resource):
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -1577,7 +1730,7 @@ class PrivateLinkResource(Resource):
:vartype group_id: str
:ivar required_members: The private link resource required member names.
:vartype required_members: list[str]
- :ivar required_zone_names: The private link resource Private link DNS zone name.
+ :ivar required_zone_names: The private link resource private link DNS zone name.
:vartype required_zone_names: list[str]
"""
@@ -1600,9 +1753,9 @@ class PrivateLinkResource(Resource):
"required_zone_names": {"key": "properties.requiredZoneNames", "type": "[str]"},
}
- def __init__(self, *, required_zone_names: Optional[List[str]] = None, **kwargs):
+ def __init__(self, *, required_zone_names: Optional[List[str]] = None, **kwargs: Any) -> None:
"""
- :keyword required_zone_names: The private link resource Private link DNS zone name.
+ :keyword required_zone_names: The private link resource private link DNS zone name.
:paramtype required_zone_names: list[str]
"""
super().__init__(**kwargs)
@@ -1622,7 +1775,7 @@ class PrivateLinkResourceListResult(_serialization.Model):
"value": {"key": "value", "type": "[PrivateLinkResource]"},
}
- def __init__(self, *, value: Optional[List["_models.PrivateLinkResource"]] = None, **kwargs):
+ def __init__(self, *, value: Optional[List["_models.PrivateLinkResource"]] = None, **kwargs: Any) -> None:
"""
:keyword value: Array of private link resources.
:paramtype value: list[~azure.mgmt.storagesync.models.PrivateLinkResource]
@@ -1632,7 +1785,8 @@ def __init__(self, *, value: Optional[List["_models.PrivateLinkResource"]] = Non
class PrivateLinkServiceConnectionState(_serialization.Model):
- """A collection of information about the state of the connection between service consumer and provider.
+ """A collection of information about the state of the connection between service consumer and
+ provider.
:ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
of the service. Known values are: "Pending", "Approved", and "Rejected".
@@ -1656,8 +1810,8 @@ def __init__(
status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None,
description: Optional[str] = None,
actions_required: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword status: Indicates whether the connection has been Approved/Rejected/Removed by the
owner of the service. Known values are: "Pending", "Approved", and "Rejected".
@@ -1688,7 +1842,7 @@ class RecallActionParameters(_serialization.Model):
"recall_path": {"key": "recallPath", "type": "str"},
}
- def __init__(self, *, pattern: Optional[str] = None, recall_path: Optional[str] = None, **kwargs):
+ def __init__(self, *, pattern: Optional[str] = None, recall_path: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword pattern: Pattern of the files.
:paramtype pattern: str
@@ -1700,13 +1854,13 @@ def __init__(self, *, pattern: Optional[str] = None, recall_path: Optional[str]
self.recall_path = recall_path
-class RegisteredServer(ProxyResource): # pylint: disable=too-many-instance-attributes
+class RegisteredServer(ProxyResource):
"""Registered Server resource.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -1764,6 +1918,16 @@ class RegisteredServer(ProxyResource): # pylint: disable=too-many-instance-attr
:vartype monitoring_configuration: str
:ivar server_name: Server name.
:vartype server_name: str
+ :ivar application_id: Server Application Id.
+ :vartype application_id: str
+ :ivar identity: Apply server with newly discovered ApplicationId if available.
+ :vartype identity: bool
+ :ivar latest_application_id: Latest Server Application Id discovered from the server. It is not
+ yet applied.
+ :vartype latest_application_id: str
+ :ivar active_auth_type: Server auth type. Known values are: "Certificate" and
+ "ManagedIdentity".
+ :vartype active_auth_type: str or ~azure.mgmt.storagesync.models.ServerAuthType
"""
_validation = {
@@ -1774,6 +1938,8 @@ class RegisteredServer(ProxyResource): # pylint: disable=too-many-instance-attr
"agent_version_status": {"readonly": True},
"agent_version_expiration_date": {"readonly": True},
"server_name": {"readonly": True},
+ "identity": {"readonly": True},
+ "active_auth_type": {"readonly": True},
}
_attribute_map = {
@@ -1804,6 +1970,10 @@ class RegisteredServer(ProxyResource): # pylint: disable=too-many-instance-attr
"monitoring_endpoint_uri": {"key": "properties.monitoringEndpointUri", "type": "str"},
"monitoring_configuration": {"key": "properties.monitoringConfiguration", "type": "str"},
"server_name": {"key": "properties.serverName", "type": "str"},
+ "application_id": {"key": "properties.applicationId", "type": "str"},
+ "identity": {"key": "properties.identity", "type": "bool"},
+ "latest_application_id": {"key": "properties.latestApplicationId", "type": "str"},
+ "active_auth_type": {"key": "properties.activeAuthType", "type": "str"},
}
def __init__( # pylint: disable=too-many-locals
@@ -1829,8 +1999,10 @@ def __init__( # pylint: disable=too-many-locals
management_endpoint_uri: Optional[str] = None,
monitoring_endpoint_uri: Optional[str] = None,
monitoring_configuration: Optional[str] = None,
- **kwargs
- ):
+ application_id: Optional[str] = None,
+ latest_application_id: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword server_certificate: Registered Server Certificate.
:paramtype server_certificate: str
@@ -1872,6 +2044,11 @@ def __init__( # pylint: disable=too-many-locals
:paramtype monitoring_endpoint_uri: str
:keyword monitoring_configuration: Monitoring Configuration.
:paramtype monitoring_configuration: str
+ :keyword application_id: Server Application Id.
+ :paramtype application_id: str
+ :keyword latest_application_id: Latest Server Application Id discovered from the server. It is
+ not yet applied.
+ :paramtype latest_application_id: str
"""
super().__init__(**kwargs)
self.server_certificate = server_certificate
@@ -1897,6 +2074,10 @@ def __init__( # pylint: disable=too-many-locals
self.monitoring_endpoint_uri = monitoring_endpoint_uri
self.monitoring_configuration = monitoring_configuration
self.server_name = None
+ self.application_id = application_id
+ self.identity = None
+ self.latest_application_id = latest_application_id
+ self.active_auth_type = None
class RegisteredServerArray(_serialization.Model):
@@ -1910,7 +2091,7 @@ class RegisteredServerArray(_serialization.Model):
"value": {"key": "value", "type": "[RegisteredServer]"},
}
- def __init__(self, *, value: Optional[List["_models.RegisteredServer"]] = None, **kwargs):
+ def __init__(self, *, value: Optional[List["_models.RegisteredServer"]] = None, **kwargs: Any) -> None:
"""
:keyword value: Collection of Registered Server.
:paramtype value: list[~azure.mgmt.storagesync.models.RegisteredServer]
@@ -1919,13 +2100,13 @@ def __init__(self, *, value: Optional[List["_models.RegisteredServer"]] = None,
self.value = value
-class RegisteredServerCreateParameters(ProxyResource): # pylint: disable=too-many-instance-attributes
+class RegisteredServerCreateParameters(ProxyResource):
"""The parameters used when creating a registered server.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -1953,6 +2134,10 @@ class RegisteredServerCreateParameters(ProxyResource): # pylint: disable=too-ma
:vartype server_id: str
:ivar friendly_name: Friendly Name.
:vartype friendly_name: str
+ :ivar application_id: Server ServicePrincipal Id.
+ :vartype application_id: str
+ :ivar identity: Apply server with newly discovered ApplicationId if available.
+ :vartype identity: bool
"""
_validation = {
@@ -1976,6 +2161,8 @@ class RegisteredServerCreateParameters(ProxyResource): # pylint: disable=too-ma
"cluster_name": {"key": "properties.clusterName", "type": "str"},
"server_id": {"key": "properties.serverId", "type": "str"},
"friendly_name": {"key": "properties.friendlyName", "type": "str"},
+ "application_id": {"key": "properties.applicationId", "type": "str"},
+ "identity": {"key": "properties.identity", "type": "bool"},
}
def __init__(
@@ -1990,8 +2177,10 @@ def __init__(
cluster_name: Optional[str] = None,
server_id: Optional[str] = None,
friendly_name: Optional[str] = None,
- **kwargs
- ):
+ application_id: Optional[str] = None,
+ identity: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword server_certificate: Registered Server Certificate.
:paramtype server_certificate: str
@@ -2011,6 +2200,10 @@ def __init__(
:paramtype server_id: str
:keyword friendly_name: Friendly Name.
:paramtype friendly_name: str
+ :keyword application_id: Server ServicePrincipal Id.
+ :paramtype application_id: str
+ :keyword identity: Apply server with newly discovered ApplicationId if available.
+ :paramtype identity: bool
"""
super().__init__(**kwargs)
self.server_certificate = server_certificate
@@ -2022,6 +2215,58 @@ def __init__(
self.cluster_name = cluster_name
self.server_id = server_id
self.friendly_name = friendly_name
+ self.application_id = application_id
+ self.identity = identity
+
+
+class RegisteredServerUpdateParameters(ProxyResource):
+ """The parameters used when updating a registered server.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.storagesync.models.SystemData
+ :ivar identity: Apply server with newly discovered ApplicationId if available.
+ :vartype identity: bool
+ :ivar application_id: Apply server with new ServicePrincipal Id.
+ :vartype application_id: str
+ """
+
+ _validation = {
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
+ "identity": {"key": "properties.identity", "type": "bool"},
+ "application_id": {"key": "properties.applicationId", "type": "str"},
+ }
+
+ def __init__(self, *, identity: Optional[bool] = None, application_id: Optional[str] = None, **kwargs: Any) -> None:
+ """
+ :keyword identity: Apply server with newly discovered ApplicationId if available.
+ :paramtype identity: bool
+ :keyword application_id: Apply server with new ServicePrincipal Id.
+ :paramtype application_id: str
+ """
+ super().__init__(**kwargs)
+ self.identity = identity
+ self.application_id = application_id
class ResourcesMoveInfo(_serialization.Model):
@@ -2038,7 +2283,9 @@ class ResourcesMoveInfo(_serialization.Model):
"resources": {"key": "resources", "type": "[str]"},
}
- def __init__(self, *, target_resource_group: Optional[str] = None, resources: Optional[List[str]] = None, **kwargs):
+ def __init__(
+ self, *, target_resource_group: Optional[str] = None, resources: Optional[List[str]] = None, **kwargs: Any
+ ) -> None:
"""
:keyword target_resource_group: Target resource group.
:paramtype target_resource_group: str
@@ -2064,7 +2311,7 @@ class RestoreFileSpec(_serialization.Model):
"isdir": {"key": "isdir", "type": "bool"},
}
- def __init__(self, *, path: Optional[str] = None, isdir: Optional[bool] = None, **kwargs):
+ def __init__(self, *, path: Optional[str] = None, isdir: Optional[bool] = None, **kwargs: Any) -> None:
"""
:keyword path: Restore file spec path.
:paramtype path: str
@@ -2076,13 +2323,13 @@ def __init__(self, *, path: Optional[str] = None, isdir: Optional[bool] = None,
self.isdir = isdir
-class ServerEndpoint(ProxyResource): # pylint: disable=too-many-instance-attributes
+class ServerEndpoint(ProxyResource):
"""Server Endpoint object.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -2139,6 +2386,9 @@ class ServerEndpoint(ProxyResource): # pylint: disable=too-many-instance-attrib
:vartype initial_upload_policy: str or ~azure.mgmt.storagesync.models.InitialUploadPolicy
:ivar server_name: Server name.
:vartype server_name: str
+ :ivar server_endpoint_provisioning_status: Server Endpoint provisioning status.
+ :vartype server_endpoint_provisioning_status:
+ ~azure.mgmt.storagesync.models.ServerEndpointProvisioningStatus
"""
_validation = {
@@ -2190,6 +2440,10 @@ class ServerEndpoint(ProxyResource): # pylint: disable=too-many-instance-attrib
"local_cache_mode": {"key": "properties.localCacheMode", "type": "str"},
"initial_upload_policy": {"key": "properties.initialUploadPolicy", "type": "str"},
"server_name": {"key": "properties.serverName", "type": "str"},
+ "server_endpoint_provisioning_status": {
+ "key": "properties.serverEndpointProvisioningStatus",
+ "type": "ServerEndpointProvisioningStatus",
+ },
}
def __init__( # pylint: disable=too-many-locals
@@ -2203,11 +2457,12 @@ def __init__( # pylint: disable=too-many-locals
server_resource_id: Optional[str] = None,
offline_data_transfer: Optional[Union[str, "_models.FeatureStatus"]] = None,
offline_data_transfer_share_name: Optional[str] = None,
- initial_download_policy: Optional[Union[str, "_models.InitialDownloadPolicy"]] = None,
- local_cache_mode: Optional[Union[str, "_models.LocalCacheMode"]] = None,
- initial_upload_policy: Optional[Union[str, "_models.InitialUploadPolicy"]] = None,
- **kwargs
- ):
+ initial_download_policy: Union[str, "_models.InitialDownloadPolicy"] = "NamespaceThenModifiedFiles",
+ local_cache_mode: Union[str, "_models.LocalCacheMode"] = "UpdateLocallyCachedFiles",
+ initial_upload_policy: Union[str, "_models.InitialUploadPolicy"] = "Merge",
+ server_endpoint_provisioning_status: Optional["_models.ServerEndpointProvisioningStatus"] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword server_local_path: Server Local path.
:paramtype server_local_path: str
@@ -2237,6 +2492,9 @@ def __init__( # pylint: disable=too-many-locals
:keyword initial_upload_policy: Policy for how the initial upload sync session is performed.
Known values are: "ServerAuthoritative" and "Merge".
:paramtype initial_upload_policy: str or ~azure.mgmt.storagesync.models.InitialUploadPolicy
+ :keyword server_endpoint_provisioning_status: Server Endpoint provisioning status.
+ :paramtype server_endpoint_provisioning_status:
+ ~azure.mgmt.storagesync.models.ServerEndpointProvisioningStatus
"""
super().__init__(**kwargs)
self.server_local_path = server_local_path
@@ -2259,6 +2517,7 @@ def __init__( # pylint: disable=too-many-locals
self.local_cache_mode = local_cache_mode
self.initial_upload_policy = initial_upload_policy
self.server_name = None
+ self.server_endpoint_provisioning_status = server_endpoint_provisioning_status
class ServerEndpointArray(_serialization.Model):
@@ -2272,7 +2531,7 @@ class ServerEndpointArray(_serialization.Model):
"value": {"key": "value", "type": "[ServerEndpoint]"},
}
- def __init__(self, *, value: Optional[List["_models.ServerEndpoint"]] = None, **kwargs):
+ def __init__(self, *, value: Optional[List["_models.ServerEndpoint"]] = None, **kwargs: Any) -> None:
"""
:keyword value: Collection of ServerEndpoint.
:paramtype value: list[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -2281,7 +2540,7 @@ def __init__(self, *, value: Optional[List["_models.ServerEndpoint"]] = None, **
self.value = value
-class ServerEndpointBackgroundDataDownloadActivity(_serialization.Model):
+class ServerEndpointBackgroundDataDownloadActivity(_serialization.Model): # pylint: disable=name-too-long
"""Background data download activity object.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2310,7 +2569,7 @@ class ServerEndpointBackgroundDataDownloadActivity(_serialization.Model):
"downloaded_bytes": {"key": "downloadedBytes", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.timestamp = None
@@ -2319,7 +2578,7 @@ def __init__(self, **kwargs):
self.downloaded_bytes = None
-class ServerEndpointCloudTieringStatus(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class ServerEndpointCloudTieringStatus(_serialization.Model):
"""Server endpoint cloud tiering status object.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2382,7 +2641,7 @@ class ServerEndpointCloudTieringStatus(_serialization.Model): # pylint: disable
"low_disk_mode": {"key": "lowDiskMode", "type": "CloudTieringLowDiskMode"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -2398,13 +2657,13 @@ def __init__(self, **kwargs):
self.low_disk_mode = None
-class ServerEndpointCreateParameters(ProxyResource): # pylint: disable=too-many-instance-attributes
+class ServerEndpointCreateParameters(ProxyResource):
"""The parameters used when creating a server endpoint.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -2481,11 +2740,11 @@ def __init__(
server_resource_id: Optional[str] = None,
offline_data_transfer: Optional[Union[str, "_models.FeatureStatus"]] = None,
offline_data_transfer_share_name: Optional[str] = None,
- initial_download_policy: Optional[Union[str, "_models.InitialDownloadPolicy"]] = None,
- local_cache_mode: Optional[Union[str, "_models.LocalCacheMode"]] = None,
- initial_upload_policy: Optional[Union[str, "_models.InitialUploadPolicy"]] = None,
- **kwargs
- ):
+ initial_download_policy: Union[str, "_models.InitialDownloadPolicy"] = "NamespaceThenModifiedFiles",
+ local_cache_mode: Union[str, "_models.LocalCacheMode"] = "UpdateLocallyCachedFiles",
+ initial_upload_policy: Union[str, "_models.InitialUploadPolicy"] = "Merge",
+ **kwargs: Any
+ ) -> None:
"""
:keyword server_local_path: Server Local path.
:paramtype server_local_path: str
@@ -2555,7 +2814,7 @@ class ServerEndpointFilesNotSyncingError(_serialization.Model):
"transient_count": {"key": "transientCount", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.error_code = None
@@ -2563,6 +2822,103 @@ def __init__(self, **kwargs):
self.transient_count = None
+class ServerEndpointProvisioningStatus(_serialization.Model):
+ """Server endpoint provisioning status information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar provisioning_status: Server Endpoint provisioning status. Known values are: "NotStarted",
+ "InProgress", "Ready_SyncNotFunctional", "Ready_SyncFunctional", and "Error".
+ :vartype provisioning_status: str or ~azure.mgmt.storagesync.models.ServerProvisioningStatus
+ :ivar provisioning_type: Server Endpoint provisioning type.
+ :vartype provisioning_type: str
+ :ivar provisioning_step_statuses: Provisioning Step status information for each step in the
+ provisioning process.
+ :vartype provisioning_step_statuses:
+ list[~azure.mgmt.storagesync.models.ServerEndpointProvisioningStepStatus]
+ """
+
+ _validation = {
+ "provisioning_status": {"readonly": True},
+ "provisioning_type": {"readonly": True},
+ "provisioning_step_statuses": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "provisioning_status": {"key": "provisioningStatus", "type": "str"},
+ "provisioning_type": {"key": "provisioningType", "type": "str"},
+ "provisioning_step_statuses": {
+ "key": "provisioningStepStatuses",
+ "type": "[ServerEndpointProvisioningStepStatus]",
+ },
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.provisioning_status = None
+ self.provisioning_type = None
+ self.provisioning_step_statuses = None
+
+
+class ServerEndpointProvisioningStepStatus(_serialization.Model):
+ """Server endpoint provisioning step status object.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: Name of the provisioning step.
+ :vartype name: str
+ :ivar status: Status of the provisioning step.
+ :vartype status: str
+ :ivar start_time: Start time of the provisioning step.
+ :vartype start_time: ~datetime.datetime
+ :ivar minutes_left: Estimated completion time of the provisioning step in minutes.
+ :vartype minutes_left: int
+ :ivar progress_percentage: Estimated progress percentage.
+ :vartype progress_percentage: int
+ :ivar end_time: End time of the provisioning step.
+ :vartype end_time: ~datetime.datetime
+ :ivar error_code: Error code (HResult) for the provisioning step.
+ :vartype error_code: int
+ :ivar additional_information: Additional information for the provisioning step.
+ :vartype additional_information: dict[str, str]
+ """
+
+ _validation = {
+ "name": {"readonly": True},
+ "status": {"readonly": True},
+ "start_time": {"readonly": True},
+ "minutes_left": {"readonly": True},
+ "progress_percentage": {"readonly": True},
+ "end_time": {"readonly": True},
+ "error_code": {"readonly": True},
+ "additional_information": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "name": {"key": "name", "type": "str"},
+ "status": {"key": "status", "type": "str"},
+ "start_time": {"key": "startTime", "type": "iso-8601"},
+ "minutes_left": {"key": "minutesLeft", "type": "int"},
+ "progress_percentage": {"key": "progressPercentage", "type": "int"},
+ "end_time": {"key": "endTime", "type": "iso-8601"},
+ "error_code": {"key": "errorCode", "type": "int"},
+ "additional_information": {"key": "additionalInformation", "type": "{str}"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.name = None
+ self.status = None
+ self.start_time = None
+ self.minutes_left = None
+ self.progress_percentage = None
+ self.end_time = None
+ self.error_code = None
+ self.additional_information = None
+
+
class ServerEndpointRecallError(_serialization.Model):
"""Server endpoint recall error object.
@@ -2584,7 +2940,7 @@ class ServerEndpointRecallError(_serialization.Model):
"count": {"key": "count", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.error_code = None
@@ -2616,7 +2972,7 @@ class ServerEndpointRecallStatus(_serialization.Model):
"recall_errors": {"key": "recallErrors", "type": "[ServerEndpointRecallError]"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_updated_timestamp = None
@@ -2670,7 +3026,7 @@ class ServerEndpointSyncActivityStatus(_serialization.Model):
"session_minutes_remaining": {"key": "sessionMinutesRemaining", "type": "int"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.timestamp = None
@@ -2730,7 +3086,7 @@ class ServerEndpointSyncSessionStatus(_serialization.Model):
"last_sync_mode": {"key": "lastSyncMode", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.last_sync_result = None
@@ -2743,7 +3099,7 @@ def __init__(self, **kwargs):
self.last_sync_mode = None
-class ServerEndpointSyncStatus(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class ServerEndpointSyncStatus(_serialization.Model):
"""Server Endpoint sync status.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2815,7 +3171,7 @@ class ServerEndpointSyncStatus(_serialization.Model): # pylint: disable=too-man
},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.download_health = None
@@ -2874,9 +3230,9 @@ def __init__(
tier_files_older_than_days: Optional[int] = None,
offline_data_transfer: Optional[Union[str, "_models.FeatureStatus"]] = None,
offline_data_transfer_share_name: Optional[str] = None,
- local_cache_mode: Optional[Union[str, "_models.LocalCacheMode"]] = None,
- **kwargs
- ):
+ local_cache_mode: Union[str, "_models.LocalCacheMode"] = "UpdateLocallyCachedFiles",
+ **kwargs: Any
+ ) -> None:
"""
:keyword cloud_tiering: Cloud Tiering. Known values are: "on" and "off".
:paramtype cloud_tiering: str or ~azure.mgmt.storagesync.models.FeatureStatus
@@ -2934,8 +3290,8 @@ def __init__(
target: Optional[str] = None,
details: Optional["_models.StorageSyncErrorDetails"] = None,
innererror: Optional["_models.StorageSyncInnerErrorDetails"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword code: Error code of the given entry.
:paramtype code: str
@@ -2975,8 +3331,8 @@ def __init__(
*,
error: Optional["_models.StorageSyncApiError"] = None,
innererror: Optional["_models.StorageSyncApiError"] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword error: Error details of the given entry.
:paramtype error: ~azure.mgmt.storagesync.models.StorageSyncApiError
@@ -3031,8 +3387,8 @@ def __init__(
http_method: Optional[str] = None,
hashed_message: Optional[str] = None,
http_error_code: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword code: Error code of the given entry.
:paramtype code: str
@@ -3089,8 +3445,8 @@ def __init__(
message: Optional[str] = None,
inner_exception: Optional[str] = None,
inner_exception_call_stack: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword call_stack: Call stack of the error.
:paramtype call_stack: str
@@ -3109,14 +3465,15 @@ def __init__(
class TrackedResource(Resource):
- """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'.
+ """The resource model definition for an Azure Resource Manager tracked top level resource which
+ has 'tags' and a 'location'.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -3149,7 +3506,7 @@ class TrackedResource(Resource):
"location": {"key": "location", "type": "str"},
}
- def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs):
+ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
:keyword tags: Resource tags.
:paramtype tags: dict[str, str]
@@ -3161,15 +3518,15 @@ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kw
self.location = location
-class StorageSyncService(TrackedResource): # pylint: disable=too-many-instance-attributes
+class StorageSyncService(TrackedResource):
"""Storage Sync Service object.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -3183,6 +3540,9 @@ class StorageSyncService(TrackedResource): # pylint: disable=too-many-instance-
:vartype tags: dict[str, str]
:ivar location: The geo-location where the resource lives. Required.
:vartype location: str
+ :ivar identity: managed identities for the Storage Sync service to interact with other Azure
+ services without maintaining any secrets or credentials in code.
+ :vartype identity: ~azure.mgmt.storagesync.models.ManagedServiceIdentity
:ivar incoming_traffic_policy: Incoming Traffic Policy. Known values are: "AllowAllTraffic" and
"AllowVirtualNetworksOnly".
:vartype incoming_traffic_policy: str or ~azure.mgmt.storagesync.models.IncomingTrafficPolicy
@@ -3192,6 +3552,9 @@ class StorageSyncService(TrackedResource): # pylint: disable=too-many-instance-
:vartype storage_sync_service_uid: str
:ivar provisioning_state: StorageSyncService Provisioning State.
:vartype provisioning_state: str
+ :ivar use_identity: Use Identity authorization once the customer has finished setting up RBAC
+ permissions.
+ :vartype use_identity: bool
:ivar last_workflow_id: StorageSyncService lastWorkflowId.
:vartype last_workflow_id: str
:ivar last_operation_name: Resource Last Operation Name.
@@ -3211,6 +3574,7 @@ class StorageSyncService(TrackedResource): # pylint: disable=too-many-instance-
"storage_sync_service_status": {"readonly": True},
"storage_sync_service_uid": {"readonly": True},
"provisioning_state": {"readonly": True},
+ "use_identity": {"readonly": True},
"last_workflow_id": {"readonly": True},
"last_operation_name": {"readonly": True},
"private_endpoint_connections": {"readonly": True},
@@ -3223,10 +3587,12 @@ class StorageSyncService(TrackedResource): # pylint: disable=too-many-instance-
"system_data": {"key": "systemData", "type": "SystemData"},
"tags": {"key": "tags", "type": "{str}"},
"location": {"key": "location", "type": "str"},
+ "identity": {"key": "identity", "type": "ManagedServiceIdentity"},
"incoming_traffic_policy": {"key": "properties.incomingTrafficPolicy", "type": "str"},
"storage_sync_service_status": {"key": "properties.storageSyncServiceStatus", "type": "int"},
"storage_sync_service_uid": {"key": "properties.storageSyncServiceUid", "type": "str"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
+ "use_identity": {"key": "properties.useIdentity", "type": "bool"},
"last_workflow_id": {"key": "properties.lastWorkflowId", "type": "str"},
"last_operation_name": {"key": "properties.lastOperationName", "type": "str"},
"private_endpoint_connections": {
@@ -3240,23 +3606,29 @@ def __init__(
*,
location: str,
tags: Optional[Dict[str, str]] = None,
+ identity: Optional["_models.ManagedServiceIdentity"] = None,
incoming_traffic_policy: Optional[Union[str, "_models.IncomingTrafficPolicy"]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword tags: Resource tags.
:paramtype tags: dict[str, str]
:keyword location: The geo-location where the resource lives. Required.
:paramtype location: str
+ :keyword identity: managed identities for the Storage Sync service to interact with other Azure
+ services without maintaining any secrets or credentials in code.
+ :paramtype identity: ~azure.mgmt.storagesync.models.ManagedServiceIdentity
:keyword incoming_traffic_policy: Incoming Traffic Policy. Known values are: "AllowAllTraffic"
and "AllowVirtualNetworksOnly".
:paramtype incoming_traffic_policy: str or ~azure.mgmt.storagesync.models.IncomingTrafficPolicy
"""
super().__init__(tags=tags, location=location, **kwargs)
+ self.identity = identity
self.incoming_traffic_policy = incoming_traffic_policy
self.storage_sync_service_status = None
self.storage_sync_service_uid = None
self.provisioning_state = None
+ self.use_identity = None
self.last_workflow_id = None
self.last_operation_name = None
self.private_endpoint_connections = None
@@ -3273,7 +3645,7 @@ class StorageSyncServiceArray(_serialization.Model):
"value": {"key": "value", "type": "[StorageSyncService]"},
}
- def __init__(self, *, value: Optional[List["_models.StorageSyncService"]] = None, **kwargs):
+ def __init__(self, *, value: Optional[List["_models.StorageSyncService"]] = None, **kwargs: Any) -> None:
"""
:keyword value: Collection of StorageSyncServices.
:paramtype value: list[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -3282,34 +3654,57 @@ def __init__(self, *, value: Optional[List["_models.StorageSyncService"]] = None
self.value = value
-class StorageSyncServiceCreateParameters(_serialization.Model):
+class StorageSyncServiceCreateParameters(TrackedResource):
"""The parameters used when creating a storage sync service.
- All required parameters must be populated in order to send to Azure.
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
- :ivar location: Required. Gets or sets the location of the resource. This will be one of the
- supported and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). The
- geo region of a resource cannot be changed once it is created, but if an identical geo region
- is specified on update, the request will succeed. Required.
- :vartype location: str
- :ivar tags: Gets or sets a list of key value pairs that describe the resource. These tags can
- be used for viewing and grouping this resource (across resource groups). A maximum of 15 tags
- can be provided for a resource. Each tag must have a key with a length no greater than 128
- characters and a value with a length no greater than 256 characters.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.storagesync.models.SystemData
+ :ivar tags: Resource tags.
:vartype tags: dict[str, str]
+ :ivar location: The geo-location where the resource lives. Required.
+ :vartype location: str
+ :ivar identity: managed identities for the Storage Sync to interact with other Azure services
+ without maintaining any secrets or credentials in code.
+ :vartype identity: ~azure.mgmt.storagesync.models.ManagedServiceIdentity
:ivar incoming_traffic_policy: Incoming Traffic Policy. Known values are: "AllowAllTraffic" and
"AllowVirtualNetworksOnly".
:vartype incoming_traffic_policy: str or ~azure.mgmt.storagesync.models.IncomingTrafficPolicy
+ :ivar use_identity: Use Identity authorization once the customer has finished setting up RBAC
+ permissions.
+ :vartype use_identity: bool
"""
_validation = {
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
"location": {"required": True},
}
_attribute_map = {
- "location": {"key": "location", "type": "str"},
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
"tags": {"key": "tags", "type": "{str}"},
+ "location": {"key": "location", "type": "str"},
+ "identity": {"key": "identity", "type": "ManagedServiceIdentity"},
"incoming_traffic_policy": {"key": "properties.incomingTrafficPolicy", "type": "str"},
+ "use_identity": {"key": "properties.useIdentity", "type": "bool"},
}
def __init__(
@@ -3317,28 +3712,30 @@ def __init__(
*,
location: str,
tags: Optional[Dict[str, str]] = None,
+ identity: Optional["_models.ManagedServiceIdentity"] = None,
incoming_traffic_policy: Optional[Union[str, "_models.IncomingTrafficPolicy"]] = None,
- **kwargs
- ):
+ use_identity: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
"""
- :keyword location: Required. Gets or sets the location of the resource. This will be one of the
- supported and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). The
- geo region of a resource cannot be changed once it is created, but if an identical geo region
- is specified on update, the request will succeed. Required.
- :paramtype location: str
- :keyword tags: Gets or sets a list of key value pairs that describe the resource. These tags
- can be used for viewing and grouping this resource (across resource groups). A maximum of 15
- tags can be provided for a resource. Each tag must have a key with a length no greater than 128
- characters and a value with a length no greater than 256 characters.
+ :keyword tags: Resource tags.
:paramtype tags: dict[str, str]
+ :keyword location: The geo-location where the resource lives. Required.
+ :paramtype location: str
+ :keyword identity: managed identities for the Storage Sync to interact with other Azure
+ services without maintaining any secrets or credentials in code.
+ :paramtype identity: ~azure.mgmt.storagesync.models.ManagedServiceIdentity
:keyword incoming_traffic_policy: Incoming Traffic Policy. Known values are: "AllowAllTraffic"
and "AllowVirtualNetworksOnly".
:paramtype incoming_traffic_policy: str or ~azure.mgmt.storagesync.models.IncomingTrafficPolicy
+ :keyword use_identity: Use Identity authorization once the customer has finished setting up RBAC
+ permissions.
+ :paramtype use_identity: bool
"""
- super().__init__(**kwargs)
- self.location = location
- self.tags = tags
+ super().__init__(tags=tags, location=location, **kwargs)
+ self.identity = identity
self.incoming_traffic_policy = incoming_traffic_policy
+ self.use_identity = use_identity
class StorageSyncServiceUpdateParameters(_serialization.Model):
@@ -3346,33 +3743,51 @@ class StorageSyncServiceUpdateParameters(_serialization.Model):
:ivar tags: The user-specified tags associated with the storage sync service.
:vartype tags: dict[str, str]
+ :ivar identity: managed identities for the Storage Sync service to interact with other Azure services
+ without maintaining any secrets or credentials in code.
+ :vartype identity: ~azure.mgmt.storagesync.models.ManagedServiceIdentity
:ivar incoming_traffic_policy: Incoming Traffic Policy. Known values are: "AllowAllTraffic" and
"AllowVirtualNetworksOnly".
:vartype incoming_traffic_policy: str or ~azure.mgmt.storagesync.models.IncomingTrafficPolicy
+ :ivar use_identity: Use Identity authorization once the customer has finished setting up RBAC
+ permissions.
+ :vartype use_identity: bool
"""
_attribute_map = {
"tags": {"key": "tags", "type": "{str}"},
+ "identity": {"key": "identity", "type": "ManagedServiceIdentity"},
"incoming_traffic_policy": {"key": "properties.incomingTrafficPolicy", "type": "str"},
+ "use_identity": {"key": "properties.useIdentity", "type": "bool"},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
+ identity: Optional["_models.ManagedServiceIdentity"] = None,
incoming_traffic_policy: Optional[Union[str, "_models.IncomingTrafficPolicy"]] = None,
- **kwargs
- ):
+ use_identity: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword tags: The user-specified tags associated with the storage sync service.
:paramtype tags: dict[str, str]
+ :keyword identity: managed identities for the Storage Sync service to interact with other
+ Azure services without maintaining any secrets or credentials in code.
+ :paramtype identity: ~azure.mgmt.storagesync.models.ManagedServiceIdentity
:keyword incoming_traffic_policy: Incoming Traffic Policy. Known values are: "AllowAllTraffic"
and "AllowVirtualNetworksOnly".
:paramtype incoming_traffic_policy: str or ~azure.mgmt.storagesync.models.IncomingTrafficPolicy
+ :keyword use_identity: Use Identity authorization when the customer has finished setting up
+ RBAC permissions.
+ :paramtype use_identity: bool
"""
super().__init__(**kwargs)
self.tags = tags
+ self.identity = identity
self.incoming_traffic_policy = incoming_traffic_policy
+ self.use_identity = use_identity
class SubscriptionState(_serialization.Model):
@@ -3400,8 +3815,8 @@ class SubscriptionState(_serialization.Model):
}
def __init__(
- self, *, state: Optional[Union[str, "_models.Reason"]] = None, properties: Optional[JSON] = None, **kwargs
- ):
+ self, *, state: Optional[Union[str, "_models.Reason"]] = None, properties: Optional[JSON] = None, **kwargs: Any
+ ) -> None:
"""
:keyword state: State of Azure Subscription. Known values are: "Registered", "Unregistered",
"Warned", "Suspended", and "Deleted".
@@ -3420,8 +3835,8 @@ class SyncGroup(ProxyResource):
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -3455,7 +3870,7 @@ class SyncGroup(ProxyResource):
"sync_group_status": {"key": "properties.syncGroupStatus", "type": "str"},
}
- def __init__(self, **kwargs):
+ def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.unique_id = None
@@ -3473,7 +3888,7 @@ class SyncGroupArray(_serialization.Model):
"value": {"key": "value", "type": "[SyncGroup]"},
}
- def __init__(self, *, value: Optional[List["_models.SyncGroup"]] = None, **kwargs):
+ def __init__(self, *, value: Optional[List["_models.SyncGroup"]] = None, **kwargs: Any) -> None:
"""
:keyword value: Collection of SyncGroup.
:paramtype value: list[~azure.mgmt.storagesync.models.SyncGroup]
@@ -3487,8 +3902,8 @@ class SyncGroupCreateParameters(ProxyResource):
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -3517,7 +3932,7 @@ class SyncGroupCreateParameters(ProxyResource):
"properties": {"key": "properties", "type": "object"},
}
- def __init__(self, *, properties: Optional[JSON] = None, **kwargs):
+ def __init__(self, *, properties: Optional[JSON] = None, **kwargs: Any) -> None:
"""
:keyword properties: The parameters used to create the sync group.
:paramtype properties: JSON
@@ -3563,8 +3978,8 @@ def __init__(
last_modified_by: Optional[str] = None,
last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None,
last_modified_at: Optional[datetime.datetime] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword created_by: The identity that created the resource.
:paramtype created_by: str
@@ -3616,8 +4031,8 @@ def __init__(
directory_path: Optional[str] = None,
change_detection_mode: Optional[Union[str, "_models.ChangeDetectionMode"]] = None,
paths: Optional[List[str]] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword directory_path: Relative path to a directory Azure File share for which change
detection is to be performed.
@@ -3646,7 +4061,7 @@ class TriggerRolloverRequest(_serialization.Model):
"server_certificate": {"key": "serverCertificate", "type": "str"},
}
- def __init__(self, *, server_certificate: Optional[str] = None, **kwargs):
+ def __init__(self, *, server_certificate: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword server_certificate: Certificate Data.
:paramtype server_certificate: str
@@ -3655,13 +4070,41 @@ def __init__(self, *, server_certificate: Optional[str] = None, **kwargs):
self.server_certificate = server_certificate
-class Workflow(ProxyResource): # pylint: disable=too-many-instance-attributes
+class UserAssignedIdentity(_serialization.Model):
+ """User assigned identity properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the assigned identity.
+ :vartype principal_id: str
+ :ivar client_id: The client ID of the assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ "principal_id": {"readonly": True},
+ "client_id": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "principal_id": {"key": "principalId", "type": "str"},
+ "client_id": {"key": "clientId", "type": "str"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.principal_id = None
+ self.client_id = None
+
+
+class Workflow(ProxyResource):
"""Workflow resource.
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar id: Fully qualified resource ID for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :ivar id: Fully qualified resource ID for the resource. E.g.
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}". # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -3723,8 +4166,8 @@ def __init__(
operation: Optional[Union[str, "_models.OperationDirection"]] = None,
steps: Optional[str] = None,
last_operation_id: Optional[str] = None,
- **kwargs
- ):
+ **kwargs: Any
+ ) -> None:
"""
:keyword last_step_name: last step name.
:paramtype last_step_name: str
@@ -3760,7 +4203,7 @@ class WorkflowArray(_serialization.Model):
"value": {"key": "value", "type": "[Workflow]"},
}
- def __init__(self, *, value: Optional[List["_models.Workflow"]] = None, **kwargs):
+ def __init__(self, *, value: Optional[List["_models.Workflow"]] = None, **kwargs: Any) -> None:
"""
:keyword value: Collection of workflow items.
:paramtype value: list[~azure.mgmt.storagesync.models.Workflow]
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/__init__.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/__init__.py
index 379e4078c187..465f08bed980 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/__init__.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/__init__.py
@@ -5,21 +5,27 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import Operations
-from ._storage_sync_services_operations import StorageSyncServicesOperations
-from ._private_link_resources_operations import PrivateLinkResourcesOperations
-from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
-from ._sync_groups_operations import SyncGroupsOperations
-from ._cloud_endpoints_operations import CloudEndpointsOperations
-from ._server_endpoints_operations import ServerEndpointsOperations
-from ._registered_servers_operations import RegisteredServersOperations
-from ._workflows_operations import WorkflowsOperations
-from ._operation_status_operations import OperationStatusOperations
-from ._microsoft_storage_sync_operations import MicrosoftStorageSyncOperationsMixin
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import Operations # type: ignore
+from ._storage_sync_services_operations import StorageSyncServicesOperations # type: ignore
+from ._private_link_resources_operations import PrivateLinkResourcesOperations # type: ignore
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations # type: ignore
+from ._sync_groups_operations import SyncGroupsOperations # type: ignore
+from ._cloud_endpoints_operations import CloudEndpointsOperations # type: ignore
+from ._server_endpoints_operations import ServerEndpointsOperations # type: ignore
+from ._registered_servers_operations import RegisteredServersOperations # type: ignore
+from ._workflows_operations import WorkflowsOperations # type: ignore
+from ._operation_status_operations import OperationStatusOperations # type: ignore
+from ._microsoft_storage_sync_operations import MicrosoftStorageSyncOperationsMixin # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -35,5 +41,5 @@
"OperationStatusOperations",
"MicrosoftStorageSyncOperationsMixin",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_cloud_endpoints_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_cloud_endpoints_operations.py
index cdbb0e0c126e..5700bd244f53 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_cloud_endpoints_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_cloud_endpoints_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,12 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -54,8 +55,8 @@ def build_create_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -64,7 +65,7 @@ def build_create_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -73,7 +74,7 @@ def build_create_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -97,7 +98,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -106,7 +107,7 @@ def build_get_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -115,7 +116,7 @@ def build_get_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -137,7 +138,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -146,7 +147,7 @@ def build_delete_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -155,7 +156,7 @@ def build_delete_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -172,7 +173,7 @@ def build_list_by_sync_group_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -181,7 +182,7 @@ def build_list_by_sync_group_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -189,7 +190,7 @@ def build_list_by_sync_group_request(
"syncGroupName": _SERIALIZER.url("sync_group_name", sync_group_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -211,8 +212,8 @@ def build_pre_backup_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -221,7 +222,7 @@ def build_pre_backup_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prebackup",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -230,7 +231,7 @@ def build_pre_backup_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -254,8 +255,8 @@ def build_post_backup_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -264,7 +265,7 @@ def build_post_backup_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postbackup",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -273,7 +274,7 @@ def build_post_backup_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -297,8 +298,8 @@ def build_pre_restore_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -307,7 +308,7 @@ def build_pre_restore_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prerestore",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -316,7 +317,7 @@ def build_pre_restore_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -340,7 +341,7 @@ def build_restoreheartbeat_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -349,7 +350,7 @@ def build_restoreheartbeat_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/restoreheartbeat",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -358,7 +359,7 @@ def build_restoreheartbeat_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -380,8 +381,8 @@ def build_post_restore_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -390,7 +391,7 @@ def build_post_restore_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postrestore",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -399,7 +400,7 @@ def build_post_restore_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -423,8 +424,8 @@ def build_trigger_change_detection_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -433,7 +434,7 @@ def build_trigger_change_detection_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/triggerChangeDetection",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -442,7 +443,7 @@ def build_trigger_change_detection_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -455,7 +456,7 @@ def build_trigger_change_detection_request(
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
-def build_afs_share_metadata_certificate_public_keys_request(
+def build_afs_share_metadata_certificate_public_keys_request( # pylint: disable=name-too-long
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
@@ -466,7 +467,7 @@ def build_afs_share_metadata_certificate_public_keys_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -475,7 +476,7 @@ def build_afs_share_metadata_certificate_public_keys_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/afsShareMetadataCertificatePublicKeys",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -484,7 +485,7 @@ def build_afs_share_metadata_certificate_public_keys_request(
"cloudEndpointName": _SERIALIZER.url("cloud_endpoint_name", cloud_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -520,10 +521,10 @@ def _create_initial(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.CloudEndpointCreateParameters, IO],
+ parameters: Union[_models.CloudEndpointCreateParameters, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.CloudEndpoint]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -534,21 +535,19 @@ def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.CloudEndpoint]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "CloudEndpointCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -558,25 +557,28 @@ def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -584,8 +586,6 @@ def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("CloudEndpoint", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -597,12 +597,12 @@ def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_create(
@@ -632,14 +632,6 @@ def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
@@ -653,7 +645,7 @@ def begin_create(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -670,18 +662,10 @@ def begin_create(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint resource. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
@@ -695,7 +679,7 @@ def begin_create(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.CloudEndpointCreateParameters, IO],
+ parameters: Union[_models.CloudEndpointCreateParameters, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.CloudEndpoint]:
"""Create a new CloudEndpoint.
@@ -709,20 +693,9 @@ def begin_create(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Cloud Endpoint resource. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.CloudEndpointCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Cloud Endpoint resource. Is either a CloudEndpointCreateParameters
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.CloudEndpointCreateParameters or IO[bytes]
:return: An instance of LROPoller that returns either CloudEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.CloudEndpoint]
@@ -731,16 +704,14 @@ def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpoint]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.CloudEndpoint] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._create_initial( # type: ignore
+ raw_result = self._create_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -753,6 +724,7 @@ def begin_create(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -763,27 +735,27 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("CloudEndpoint", pipeline_response)
+ deserialized = self._deserialize("CloudEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.CloudEndpoint].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
+ return LROPoller[_models.CloudEndpoint](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def get(
@@ -805,12 +777,11 @@ def get(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CloudEndpoint or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CloudEndpoint
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -821,27 +792,24 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpoint]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.CloudEndpoint] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -857,24 +825,22 @@ def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("CloudEndpoint", pipeline_response)
+ deserialized = self._deserialize("CloudEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
+ return deserialized # type: ignore
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
-
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -885,32 +851,34 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -933,10 +901,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -958,14 +928,6 @@ def begin_delete(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -973,15 +935,13 @@ def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -992,28 +952,27 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_by_sync_group(
@@ -1028,7 +987,6 @@ def list_by_sync_group(
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CloudEndpoint or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.CloudEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1036,12 +994,10 @@ def list_by_sync_group(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpointArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.CloudEndpointArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1052,18 +1008,16 @@ def list_by_sync_group(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_sync_group_request(
+ _request = build_list_by_sync_group_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_sync_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1075,26 +1029,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("CloudEndpointArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1107,18 +1061,16 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_sync_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints"} # type: ignore
-
- def _pre_backup_initial( # pylint: disable=inconsistent-return-statements
+ def _pre_backup_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.BackupRequest, IO],
+ parameters: Union[_models.BackupRequest, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1129,21 +1081,19 @@ def _pre_backup_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "BackupRequest")
- request = build_pre_backup_request(
+ _request = build_pre_backup_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1153,43 +1103,41 @@ def _pre_backup_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._pre_backup_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
- if response.status_code == 200:
- response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-correlation-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-correlation-request-id")
- )
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
- if response.status_code == 202:
- response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-correlation-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-correlation-request-id")
- )
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _pre_backup_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prebackup"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_pre_backup(
@@ -1219,14 +1167,6 @@ def begin_pre_backup(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1239,7 +1179,7 @@ def begin_pre_backup(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1256,18 +1196,10 @@ def begin_pre_backup(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1280,7 +1212,7 @@ def begin_pre_backup(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.BackupRequest, IO],
+ parameters: Union[_models.BackupRequest, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Pre Backup a given CloudEndpoint.
@@ -1294,19 +1226,9 @@ def begin_pre_backup(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Backup request. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Backup request. Is either a BackupRequest type or a IO[bytes] type.
+ Required.
+ :type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1314,16 +1236,14 @@ def begin_pre_backup(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._pre_backup_initial( # type: ignore
+ raw_result = self._pre_backup_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1336,28 +1256,27 @@ def begin_pre_backup(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_pre_backup.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prebackup"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
def _post_backup_initial(
self,
@@ -1365,10 +1284,10 @@ def _post_backup_initial(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.BackupRequest, IO],
+ parameters: Union[_models.BackupRequest, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.PostBackupResponse]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1379,21 +1298,19 @@ def _post_backup_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PostBackupResponse]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "BackupRequest")
- request = build_post_backup_request(
+ _request = build_post_backup_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1403,48 +1320,41 @@ def _post_backup_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._post_backup_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-correlation-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-correlation-request-id")
- )
-
- deserialized = self._deserialize("PostBackupResponse", pipeline_response)
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
- if response.status_code == 202:
- response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
- response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
- response_headers["x-ms-correlation-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-correlation-request-id")
- )
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _post_backup_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postbackup"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_post_backup(
@@ -1474,14 +1384,6 @@ def begin_post_backup(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
@@ -1495,7 +1397,7 @@ def begin_post_backup(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1512,18 +1414,10 @@ def begin_post_backup(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Backup request. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
@@ -1537,7 +1431,7 @@ def begin_post_backup(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.BackupRequest, IO],
+ parameters: Union[_models.BackupRequest, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.PostBackupResponse]:
"""Post Backup a given CloudEndpoint.
@@ -1551,19 +1445,9 @@ def begin_post_backup(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Backup request. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Backup request. Is either a BackupRequest type or a IO[bytes] type.
+ Required.
+ :type parameters: ~azure.mgmt.storagesync.models.BackupRequest or IO[bytes]
:return: An instance of LROPoller that returns either PostBackupResponse or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.PostBackupResponse]
@@ -1572,16 +1456,14 @@ def begin_post_backup(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PostBackupResponse]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.PostBackupResponse] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._post_backup_initial( # type: ignore
+ raw_result = self._post_backup_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1594,6 +1476,7 @@ def begin_post_backup(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -1605,38 +1488,38 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("PostBackupResponse", pipeline_response)
+ deserialized = self._deserialize("PostBackupResponse", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.PostBackupResponse].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_post_backup.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postbackup"} # type: ignore
+ return LROPoller[_models.PostBackupResponse](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _pre_restore_initial( # pylint: disable=inconsistent-return-statements
+ def _pre_restore_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.PreRestoreRequest, IO],
+ parameters: Union[_models.PreRestoreRequest, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1647,21 +1530,19 @@ def _pre_restore_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PreRestoreRequest")
- request = build_pre_restore_request(
+ _request = build_pre_restore_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1671,20 +1552,24 @@ def _pre_restore_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._pre_restore_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -1697,10 +1582,12 @@ def _pre_restore_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _pre_restore_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prerestore"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_pre_restore(
@@ -1730,14 +1617,6 @@ def begin_pre_restore(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1750,7 +1629,7 @@ def begin_pre_restore(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1767,18 +1646,10 @@ def begin_pre_restore(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1791,7 +1662,7 @@ def begin_pre_restore(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.PreRestoreRequest, IO],
+ parameters: Union[_models.PreRestoreRequest, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Pre Restore a given CloudEndpoint.
@@ -1805,20 +1676,9 @@ def begin_pre_restore(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Cloud Endpoint object. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.PreRestoreRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Cloud Endpoint object. Is either a PreRestoreRequest type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.PreRestoreRequest or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1826,16 +1686,14 @@ def begin_pre_restore(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._pre_restore_initial( # type: ignore
+ raw_result = self._pre_restore_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1848,28 +1706,27 @@ def begin_pre_restore(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_pre_restore.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/prerestore"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def restoreheartbeat( # pylint: disable=inconsistent-return-statements
@@ -1891,12 +1748,11 @@ def restoreheartbeat( # pylint: disable=inconsistent-return-statements
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1907,27 +1763,24 @@ def restoreheartbeat( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_restoreheartbeat_request(
+ _request = build_restoreheartbeat_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.restoreheartbeat.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1944,20 +1797,18 @@ def restoreheartbeat( # pylint: disable=inconsistent-return-statements
)
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, None, response_headers) # type: ignore
- restoreheartbeat.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/restoreheartbeat"} # type: ignore
-
- def _post_restore_initial( # pylint: disable=inconsistent-return-statements
+ def _post_restore_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.PostRestoreRequest, IO],
+ parameters: Union[_models.PostRestoreRequest, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1968,21 +1819,19 @@ def _post_restore_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PostRestoreRequest")
- request = build_post_restore_request(
+ _request = build_post_restore_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1992,20 +1841,24 @@ def _post_restore_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._post_restore_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -2018,10 +1871,12 @@ def _post_restore_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _post_restore_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postrestore"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_post_restore(
@@ -2051,14 +1906,6 @@ def begin_post_restore(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -2071,7 +1918,7 @@ def begin_post_restore(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -2088,18 +1935,10 @@ def begin_post_restore(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Body of Cloud Endpoint object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -2112,7 +1951,7 @@ def begin_post_restore(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.PostRestoreRequest, IO],
+ parameters: Union[_models.PostRestoreRequest, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Post Restore a given CloudEndpoint.
@@ -2126,20 +1965,9 @@ def begin_post_restore(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Body of Cloud Endpoint object. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.PostRestoreRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Cloud Endpoint object. Is either a PostRestoreRequest type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.PostRestoreRequest or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -2147,16 +1975,14 @@ def begin_post_restore(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._post_restore_initial( # type: ignore
+ raw_result = self._post_restore_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -2169,39 +1995,38 @@ def begin_post_restore(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_post_restore.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/postrestore"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _trigger_change_detection_initial( # pylint: disable=inconsistent-return-statements
+ def _trigger_change_detection_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.TriggerChangeDetectionParameters, IO],
+ parameters: Union[_models.TriggerChangeDetectionParameters, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2212,21 +2037,19 @@ def _trigger_change_detection_initial( # pylint: disable=inconsistent-return-st
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "TriggerChangeDetectionParameters")
- request = build_trigger_change_detection_request(
+ _request = build_trigger_change_detection_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -2236,20 +2059,24 @@ def _trigger_change_detection_initial( # pylint: disable=inconsistent-return-st
content_type=content_type,
json=_json,
content=_content,
- template_url=self._trigger_change_detection_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -2262,10 +2089,12 @@ def _trigger_change_detection_initial( # pylint: disable=inconsistent-return-st
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _trigger_change_detection_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/triggerChangeDetection"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_trigger_change_detection(
@@ -2296,14 +2125,6 @@ def begin_trigger_change_detection(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -2316,7 +2137,7 @@ def begin_trigger_change_detection(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -2334,18 +2155,10 @@ def begin_trigger_change_detection(
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
:param parameters: Trigger Change Detection Action parameters. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -2358,7 +2171,7 @@ def begin_trigger_change_detection(
storage_sync_service_name: str,
sync_group_name: str,
cloud_endpoint_name: str,
- parameters: Union[_models.TriggerChangeDetectionParameters, IO],
+ parameters: Union[_models.TriggerChangeDetectionParameters, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Triggers detection of changes performed on Azure File share connected to the specified Azure
@@ -2373,20 +2186,9 @@ def begin_trigger_change_detection(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :param parameters: Trigger Change Detection Action parameters. Is either a model type or a IO
- type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.TriggerChangeDetectionParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Trigger Change Detection Action parameters. Is either a
+ TriggerChangeDetectionParameters type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.TriggerChangeDetectionParameters or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -2394,16 +2196,14 @@ def begin_trigger_change_detection(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._trigger_change_detection_initial( # type: ignore
+ raw_result = self._trigger_change_detection_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -2416,31 +2216,30 @@ def begin_trigger_change_detection(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_trigger_change_detection.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/triggerChangeDetection"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
- def afs_share_metadata_certificate_public_keys(
+ def afs_share_metadata_certificate_public_keys( # pylint: disable=name-too-long
self,
resource_group_name: str,
storage_sync_service_name: str,
@@ -2459,12 +2258,11 @@ def afs_share_metadata_certificate_public_keys(
:type sync_group_name: str
:param cloud_endpoint_name: Name of Cloud Endpoint object. Required.
:type cloud_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CloudEndpointAfsShareMetadataCertificatePublicKeys or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CloudEndpointAfsShareMetadataCertificatePublicKeys
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2475,27 +2273,24 @@ def afs_share_metadata_certificate_public_keys(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CloudEndpointAfsShareMetadataCertificatePublicKeys]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.CloudEndpointAfsShareMetadataCertificatePublicKeys] = kwargs.pop("cls", None)
- request = build_afs_share_metadata_certificate_public_keys_request(
+ _request = build_afs_share_metadata_certificate_public_keys_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
cloud_endpoint_name=cloud_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.afs_share_metadata_certificate_public_keys.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -2511,11 +2306,11 @@ def afs_share_metadata_certificate_public_keys(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("CloudEndpointAfsShareMetadataCertificatePublicKeys", pipeline_response)
+ deserialized = self._deserialize(
+ "CloudEndpointAfsShareMetadataCertificatePublicKeys", pipeline_response.http_response
+ )
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- afs_share_metadata_certificate_public_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/cloudEndpoints/{cloudEndpointName}/afsShareMetadataCertificatePublicKeys"} # type: ignore
+ return deserialized # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_microsoft_storage_sync_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_microsoft_storage_sync_operations.py
index 86baaa226cd2..2397d87ff65d 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_microsoft_storage_sync_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_microsoft_storage_sync_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -18,20 +17,19 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
+from .._vendor import MicrosoftStorageSyncMixinABC
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -45,7 +43,7 @@ def build_location_operation_status_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -54,12 +52,12 @@ def build_location_operation_status_request(
"/subscriptions/{subscriptionId}/providers/Microsoft.StorageSync/locations/{locationName}/operations/{operationId}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"locationName": _SERIALIZER.url("location_name", location_name, "str"),
"operationId": _SERIALIZER.url("operation_id", operation_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -71,6 +69,7 @@ def build_location_operation_status_request(
class MicrosoftStorageSyncOperationsMixin(MicrosoftStorageSyncMixinABC):
+
@distributed_trace
def location_operation_status(
self, location_name: str, operation_id: str, **kwargs: Any
@@ -81,12 +80,11 @@ def location_operation_status(
:type location_name: str
:param operation_id: operation Id. Required.
:type operation_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: LocationOperationStatus or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.LocationOperationStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -97,25 +95,22 @@ def location_operation_status(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.LocationOperationStatus]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.LocationOperationStatus] = kwargs.pop("cls", None)
- request = build_location_operation_status_request(
+ _request = build_location_operation_status_request(
location_name=location_name,
operation_id=operation_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.location_operation_status.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -131,11 +126,9 @@ def location_operation_status(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("LocationOperationStatus", pipeline_response)
+ deserialized = self._deserialize("LocationOperationStatus", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- location_operation_status.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StorageSync/locations/{locationName}/operations/{operationId}"} # type: ignore
+ return deserialized # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_operation_status_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_operation_status_operations.py
index 411a6a69cf26..e4a4763d9d5d 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_operation_status_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_operation_status_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -18,20 +17,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -50,7 +47,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -59,7 +56,7 @@ def build_get_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/locations/{locationName}/workflows/{workflowId}/operations/{operationId}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -68,7 +65,7 @@ def build_get_request(
"operationId": _SERIALIZER.url("operation_id", operation_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -113,12 +110,11 @@ def get(
:type workflow_id: str
:param operation_id: operation Id. Required.
:type operation_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationStatus or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.OperationStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -129,27 +125,24 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationStatus]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.OperationStatus] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
location_name=location_name,
workflow_id=workflow_id,
operation_id=operation_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -165,11 +158,9 @@ def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("OperationStatus", pipeline_response)
+ deserialized = self._deserialize("OperationStatus", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/locations/{locationName}/workflows/{workflowId}/operations/{operationId}"} # type: ignore
+ return deserialized # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_operations.py
index ba9752e66c01..13cbdfb531b3 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,20 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -45,7 +42,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -83,7 +80,6 @@ def __init__(self, *args, **kwargs):
def list(self, **kwargs: Any) -> Iterable["_models.OperationEntity"]:
"""Lists all of the available Storage Sync Rest API operations.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationEntity or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.OperationEntity]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -91,12 +87,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.OperationEntity"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationEntityListResult]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.OperationEntityListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -107,14 +101,12 @@ def list(self, **kwargs: Any) -> Iterable["_models.OperationEntity"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -126,26 +118,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("OperationEntityListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -157,5 +149,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list.metadata = {"url": "/providers/Microsoft.StorageSync/operations"} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_private_endpoint_connections_operations.py
index c61ef1eda613..0737ea9f16d0 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_private_endpoint_connections_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_private_endpoint_connections_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,8 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +16,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,12 +31,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -53,7 +53,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -66,13 +66,13 @@ def build_get_request(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"privateEndpointConnectionName": _SERIALIZER.url(
"private_endpoint_connection_name", private_endpoint_connection_name, "str"
),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -93,8 +93,8 @@ def build_create_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -107,13 +107,13 @@ def build_create_request(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"privateEndpointConnectionName": _SERIALIZER.url(
"private_endpoint_connection_name", private_endpoint_connection_name, "str"
),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -136,7 +136,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -149,13 +149,13 @@ def build_delete_request(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"privateEndpointConnectionName": _SERIALIZER.url(
"private_endpoint_connection_name", private_endpoint_connection_name, "str"
),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -166,13 +166,13 @@ def build_delete_request(
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
-def build_list_by_storage_sync_service_request(
+def build_list_by_storage_sync_service_request( # pylint: disable=name-too-long
resource_group_name: str, storage_sync_service_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -181,14 +181,14 @@ def build_list_by_storage_sync_service_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -237,12 +237,11 @@ def get(
:param private_endpoint_connection_name: The name of the private endpoint connection associated
with the Azure resource. Required.
:type private_endpoint_connection_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -253,26 +252,23 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -282,24 +278,22 @@ def get(
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return deserialized # type: ignore
def _create_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
private_endpoint_connection_name: str,
- properties: Union[_models.PrivateEndpointConnection, IO],
+ properties: Union[_models.PrivateEndpointConnection, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.PrivateEndpointConnection]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -310,21 +304,19 @@ def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.PrivateEndpointConnection]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(properties, (IO, bytes)):
+ if isinstance(properties, (IOBase, bytes)):
_content = properties
else:
_json = self._serialize.body(properties, "PrivateEndpointConnection")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -333,29 +325,29 @@ def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -367,12 +359,12 @@ def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_create(
@@ -402,14 +394,6 @@ def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.PrivateEndpointConnection]
@@ -422,7 +406,7 @@ def begin_create(
resource_group_name: str,
storage_sync_service_name: str,
private_endpoint_connection_name: str,
- properties: IO,
+ properties: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -440,18 +424,10 @@ def begin_create(
with the Azure resource. Required.
:type private_endpoint_connection_name: str
:param properties: The private endpoint connection properties. Required.
- :type properties: IO
+ :type properties: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.PrivateEndpointConnection]
@@ -464,7 +440,7 @@ def begin_create(
resource_group_name: str,
storage_sync_service_name: str,
private_endpoint_connection_name: str,
- properties: Union[_models.PrivateEndpointConnection, IO],
+ properties: Union[_models.PrivateEndpointConnection, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.PrivateEndpointConnection]:
"""Update the state of specified private endpoint connection associated with the storage sync
@@ -479,20 +455,9 @@ def begin_create(
:param private_endpoint_connection_name: The name of the private endpoint connection associated
with the Azure resource. Required.
:type private_endpoint_connection_name: str
- :param properties: The private endpoint connection properties. Is either a model type or a IO
- type. Required.
- :type properties: ~azure.mgmt.storagesync.models.PrivateEndpointConnection or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param properties: The private endpoint connection properties. Is either a
+ PrivateEndpointConnection type or a IO[bytes] type. Required.
+ :type properties: ~azure.mgmt.storagesync.models.PrivateEndpointConnection or IO[bytes]
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.PrivateEndpointConnection]
@@ -501,16 +466,14 @@ def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._create_initial( # type: ignore
+ raw_result = self._create_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -522,39 +485,40 @@ def begin_create(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.PrivateEndpointConnection].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return LROPoller[_models.PrivateEndpointConnection](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
private_endpoint_connection_name: str,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -565,31 +529,33 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -606,10 +572,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -630,14 +598,6 @@ def begin_delete(
:param private_endpoint_connection_name: The name of the private endpoint connection associated
with the Azure resource. Required.
:type private_endpoint_connection_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -645,15 +605,13 @@ def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -663,28 +621,27 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_by_storage_sync_service(
@@ -697,7 +654,6 @@ def list_by_storage_sync_service(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PrivateEndpointConnection or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.PrivateEndpointConnection]
@@ -706,12 +662,10 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionListResult]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.PrivateEndpointConnectionListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -722,17 +676,15 @@ def list_by_storage_sync_service(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -744,26 +696,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -775,5 +727,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateEndpointConnections"} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_private_link_resources_operations.py
index ba902ce86b89..79a494e3d3cb 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_private_link_resources_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_private_link_resources_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -18,20 +17,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -39,13 +36,13 @@
_SERIALIZER.client_side_validation = False
-def build_list_by_storage_sync_service_request(
+def build_list_by_storage_sync_service_request( # pylint: disable=name-too-long
resource_group_name: str, storage_sync_service_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -58,10 +55,10 @@ def build_list_by_storage_sync_service_request(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -103,12 +100,11 @@ def list_by_storage_sync_service(
:param storage_sync_service_name: The name of the storage sync service name within the
specified resource group. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateLinkResourceListResult or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.PrivateLinkResourceListResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -119,38 +115,34 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateLinkResourceListResult]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None)
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response)
+ deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/privateLinkResources"} # type: ignore
+ return deserialized # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_registered_servers_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_registered_servers_operations.py
index e2a58ca1a2f1..c27246945bcb 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_registered_servers_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_registered_servers_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,12 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -43,13 +44,13 @@
_SERIALIZER.client_side_validation = False
-def build_list_by_storage_sync_service_request(
+def build_list_by_storage_sync_service_request( # pylint: disable=name-too-long
resource_group_name: str, storage_sync_service_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -58,14 +59,14 @@ def build_list_by_storage_sync_service_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -82,7 +83,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -91,7 +92,7 @@ def build_get_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -99,7 +100,7 @@ def build_get_request(
"serverId": _SERIALIZER.url("server_id", server_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -116,8 +117,8 @@ def build_create_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -126,7 +127,7 @@ def build_create_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -134,7 +135,7 @@ def build_create_request(
"serverId": _SERIALIZER.url("server_id", server_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -147,13 +148,50 @@ def build_create_request(
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+def build_update_request(
+ resource_group_name: str, storage_sync_service_name: str, server_id: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+ "resourceGroupName": _SERIALIZER.url(
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
+ ),
+ "storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
+ "serverId": _SERIALIZER.url("server_id", server_id, "str"),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
+
+
def build_delete_request(
resource_group_name: str, storage_sync_service_name: str, server_id: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -162,7 +200,7 @@ def build_delete_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -170,7 +208,7 @@ def build_delete_request(
"serverId": _SERIALIZER.url("server_id", server_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -187,8 +225,8 @@ def build_trigger_rollover_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -197,7 +235,7 @@ def build_trigger_rollover_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}/triggerRollover",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -205,7 +243,7 @@ def build_trigger_rollover_request(
"serverId": _SERIALIZER.url("server_id", server_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -248,7 +286,6 @@ def list_by_storage_sync_service(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RegisteredServer or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.RegisteredServer]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -256,12 +293,10 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.RegisteredServerArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.RegisteredServerArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -272,17 +307,15 @@ def list_by_storage_sync_service(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -294,26 +327,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("RegisteredServerArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -326,8 +359,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers"} # type: ignore
-
@distributed_trace
def get(
self, resource_group_name: str, storage_sync_service_name: str, server_id: str, **kwargs: Any
@@ -341,12 +372,11 @@ def get(
:type storage_sync_service_name: str
:param server_id: GUID identifying the on-premises server. Required.
:type server_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: RegisteredServer or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.RegisteredServer
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -357,26 +387,23 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.RegisteredServer]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.RegisteredServer] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -392,24 +419,22 @@ def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("RegisteredServer", pipeline_response)
+ deserialized = self._deserialize("RegisteredServer", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return deserialized # type: ignore
def _create_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: Union[_models.RegisteredServerCreateParameters, IO],
+ parameters: Union[_models.RegisteredServerCreateParameters, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.RegisteredServer]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -420,21 +445,19 @@ def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.RegisteredServer]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "RegisteredServerCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -443,25 +466,28 @@ def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -469,8 +495,6 @@ def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("RegisteredServer", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -481,12 +505,12 @@ def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_create(
@@ -513,14 +537,6 @@ def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either RegisteredServer or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
@@ -533,7 +549,7 @@ def begin_create(
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -548,18 +564,10 @@ def begin_create(
:param server_id: GUID identifying the on-premises server. Required.
:type server_id: str
:param parameters: Body of Registered Server object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either RegisteredServer or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
@@ -572,7 +580,7 @@ def begin_create(
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: Union[_models.RegisteredServerCreateParameters, IO],
+ parameters: Union[_models.RegisteredServerCreateParameters, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.RegisteredServer]:
"""Add a new registered server.
@@ -584,20 +592,238 @@ def begin_create(
:type storage_sync_service_name: str
:param server_id: GUID identifying the on-premises server. Required.
:type server_id: str
- :param parameters: Body of Registered Server object. Is either a model type or a IO type.
+ :param parameters: Body of Registered Server object. Is either a
+ RegisteredServerCreateParameters type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.RegisteredServerCreateParameters or IO[bytes]
+ :return: An instance of LROPoller that returns either RegisteredServer or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.RegisteredServer] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._create_initial(
+ resource_group_name=resource_group_name,
+ storage_sync_service_name=storage_sync_service_name,
+ server_id=server_id,
+ parameters=parameters,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ response_headers = {}
+ response = pipeline_response.http_response
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
+
+ deserialized = self._deserialize("RegisteredServer", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[_models.RegisteredServer].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[_models.RegisteredServer](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ def _update_initial(
+ self,
+ resource_group_name: str,
+ storage_sync_service_name: str,
+ server_id: str,
+ parameters: Union[_models.RegisteredServerUpdateParameters, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(parameters, (IOBase, bytes)):
+ _content = parameters
+ else:
+ _json = self._serialize.body(parameters, "RegisteredServerUpdateParameters")
+
+ _request = build_update_request(
+ resource_group_name=resource_group_name,
+ storage_sync_service_name=storage_sync_service_name,
+ server_id=server_id,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
+
+ if response.status_code == 202:
+ response_headers["Azure-AsyncOperation"] = self._deserialize(
+ "str", response.headers.get("Azure-AsyncOperation")
+ )
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-correlation-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-correlation-request-id")
+ )
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ storage_sync_service_name: str,
+ server_id: str,
+ parameters: _models.RegisteredServerUpdateParameters,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.RegisteredServer]:
+ """Update registered server.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param storage_sync_service_name: Name of Storage Sync Service resource. Required.
+ :type storage_sync_service_name: str
+ :param server_id: GUID identifying the on-premises server. Required.
+ :type server_id: str
+ :param parameters: Body of Registered Server object. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.RegisteredServerUpdateParameters
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either RegisteredServer or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ storage_sync_service_name: str,
+ server_id: str,
+ parameters: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.RegisteredServer]:
+ """Update registered server.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
- :type parameters: ~azure.mgmt.storagesync.models.RegisteredServerCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
+ :type resource_group_name: str
+ :param storage_sync_service_name: Name of Storage Sync Service resource. Required.
+ :type storage_sync_service_name: str
+ :param server_id: GUID identifying the on-premises server. Required.
+ :type server_id: str
+ :param parameters: Body of Registered Server object. Required.
+ :type parameters: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :return: An instance of LROPoller that returns either RegisteredServer or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_update(
+ self,
+ resource_group_name: str,
+ storage_sync_service_name: str,
+ server_id: str,
+ parameters: Union[_models.RegisteredServerUpdateParameters, IO[bytes]],
+ **kwargs: Any
+ ) -> LROPoller[_models.RegisteredServer]:
+ """Update registered server.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param storage_sync_service_name: Name of Storage Sync Service resource. Required.
+ :type storage_sync_service_name: str
+ :param server_id: GUID identifying the on-premises server. Required.
+ :type server_id: str
+ :param parameters: Body of Registered Server object. Is either a
+ RegisteredServerUpdateParameters type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.RegisteredServerUpdateParameters or IO[bytes]
:return: An instance of LROPoller that returns either RegisteredServer or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.RegisteredServer]
@@ -606,16 +832,14 @@ def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.RegisteredServer]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.RegisteredServer] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._create_initial( # type: ignore
+ raw_result = self._update_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -627,6 +851,7 @@ def begin_create(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -637,32 +862,32 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("RegisteredServer", pipeline_response)
+ deserialized = self._deserialize("RegisteredServer", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.RegisteredServer].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return LROPoller[_models.RegisteredServer](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self, resource_group_name: str, storage_sync_service_name: str, server_id: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -673,31 +898,33 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -716,10 +943,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -734,14 +963,6 @@ def begin_delete(
:type storage_sync_service_name: str
:param server_id: GUID identifying the on-premises server. Required.
:type server_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -749,15 +970,13 @@ def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -767,38 +986,37 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _trigger_rollover_initial( # pylint: disable=inconsistent-return-statements
+ def _trigger_rollover_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: Union[_models.TriggerRolloverRequest, IO],
+ parameters: Union[_models.TriggerRolloverRequest, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -809,21 +1027,19 @@ def _trigger_rollover_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "TriggerRolloverRequest")
- request = build_trigger_rollover_request(
+ _request = build_trigger_rollover_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -832,20 +1048,24 @@ def _trigger_rollover_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._trigger_rollover_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -864,10 +1084,12 @@ def _trigger_rollover_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _trigger_rollover_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}/triggerRollover"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_trigger_rollover(
@@ -894,14 +1116,6 @@ def begin_trigger_rollover(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -913,7 +1127,7 @@ def begin_trigger_rollover(
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -928,18 +1142,10 @@ def begin_trigger_rollover(
:param server_id: Server Id. Required.
:type server_id: str
:param parameters: Body of Trigger Rollover request. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -951,7 +1157,7 @@ def begin_trigger_rollover(
resource_group_name: str,
storage_sync_service_name: str,
server_id: str,
- parameters: Union[_models.TriggerRolloverRequest, IO],
+ parameters: Union[_models.TriggerRolloverRequest, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Triggers Server certificate rollover.
@@ -963,20 +1169,9 @@ def begin_trigger_rollover(
:type storage_sync_service_name: str
:param server_id: Server Id. Required.
:type server_id: str
- :param parameters: Body of Trigger Rollover request. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.TriggerRolloverRequest or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Trigger Rollover request. Is either a TriggerRolloverRequest type or
+ a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.TriggerRolloverRequest or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -984,16 +1179,14 @@ def begin_trigger_rollover(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._trigger_rollover_initial( # type: ignore
+ raw_result = self._trigger_rollover_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
server_id=server_id,
@@ -1005,25 +1198,24 @@ def begin_trigger_rollover(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_trigger_rollover.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/registeredServers/{serverId}/triggerRollover"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_server_endpoints_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_server_endpoints_operations.py
index 07a571de6a09..e461961689f8 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_server_endpoints_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_server_endpoints_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,12 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -54,8 +55,8 @@ def build_create_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -64,7 +65,7 @@ def build_create_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -73,7 +74,7 @@ def build_create_request(
"serverEndpointName": _SERIALIZER.url("server_endpoint_name", server_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -97,8 +98,8 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -107,7 +108,7 @@ def build_update_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -116,7 +117,7 @@ def build_update_request(
"serverEndpointName": _SERIALIZER.url("server_endpoint_name", server_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -140,7 +141,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -149,7 +150,7 @@ def build_get_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -158,7 +159,7 @@ def build_get_request(
"serverEndpointName": _SERIALIZER.url("server_endpoint_name", server_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -180,7 +181,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -189,7 +190,7 @@ def build_delete_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -198,7 +199,7 @@ def build_delete_request(
"serverEndpointName": _SERIALIZER.url("server_endpoint_name", server_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -215,7 +216,7 @@ def build_list_by_sync_group_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -224,7 +225,7 @@ def build_list_by_sync_group_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -232,7 +233,7 @@ def build_list_by_sync_group_request(
"syncGroupName": _SERIALIZER.url("sync_group_name", sync_group_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -254,8 +255,8 @@ def build_recall_action_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -264,7 +265,7 @@ def build_recall_action_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}/recallAction",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -273,7 +274,7 @@ def build_recall_action_request(
"serverEndpointName": _SERIALIZER.url("server_endpoint_name", server_endpoint_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -311,10 +312,10 @@ def _create_initial(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Union[_models.ServerEndpointCreateParameters, IO],
+ parameters: Union[_models.ServerEndpointCreateParameters, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.ServerEndpoint]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -325,21 +326,19 @@ def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ServerEndpoint]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ServerEndpointCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -349,25 +348,28 @@ def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -375,8 +377,6 @@ def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -387,12 +387,12 @@ def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_create(
@@ -422,14 +422,6 @@ def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -443,7 +435,7 @@ def begin_create(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -460,18 +452,10 @@ def begin_create(
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
:param parameters: Body of Server Endpoint object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -485,7 +469,7 @@ def begin_create(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Union[_models.ServerEndpointCreateParameters, IO],
+ parameters: Union[_models.ServerEndpointCreateParameters, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.ServerEndpoint]:
"""Create a new ServerEndpoint.
@@ -499,20 +483,9 @@ def begin_create(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :param parameters: Body of Server Endpoint object. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.ServerEndpointCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Server Endpoint object. Is either a ServerEndpointCreateParameters
+ type or a IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.ServerEndpointCreateParameters or IO[bytes]
:return: An instance of LROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -521,16 +494,14 @@ def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.ServerEndpoint]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.ServerEndpoint] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._create_initial( # type: ignore
+ raw_result = self._create_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -543,6 +514,7 @@ def begin_create(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -553,27 +525,27 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
+ deserialized = self._deserialize("ServerEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.ServerEndpoint].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return LROPoller[_models.ServerEndpoint](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
def _update_initial(
self,
@@ -581,10 +553,10 @@ def _update_initial(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Optional[Union[_models.ServerEndpointUpdateParameters, IO]] = None,
+ parameters: Optional[Union[_models.ServerEndpointUpdateParameters, IO[bytes]]] = None,
**kwargs: Any
- ) -> Optional[_models.ServerEndpoint]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -595,16 +567,14 @@ def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ServerEndpoint]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
if parameters is not None:
@@ -612,7 +582,7 @@ def _update_initial(
else:
_json = None
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -622,25 +592,28 @@ def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -648,8 +621,6 @@ def _update_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -660,12 +631,12 @@ def _update_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_update(
@@ -695,14 +666,6 @@ def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -716,7 +679,7 @@ def begin_update(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Optional[IO] = None,
+ parameters: Optional[IO[bytes]] = None,
*,
content_type: str = "application/json",
**kwargs: Any
@@ -733,18 +696,10 @@ def begin_update(
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
:param parameters: Any of the properties applicable in PUT request. Default value is None.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -758,7 +713,7 @@ def begin_update(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Optional[Union[_models.ServerEndpointUpdateParameters, IO]] = None,
+ parameters: Optional[Union[_models.ServerEndpointUpdateParameters, IO[bytes]]] = None,
**kwargs: Any
) -> LROPoller[_models.ServerEndpoint]:
"""Patch a given ServerEndpoint.
@@ -772,20 +727,9 @@ def begin_update(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :param parameters: Any of the properties applicable in PUT request. Is either a model type or a
- IO type. Default value is None.
- :type parameters: ~azure.mgmt.storagesync.models.ServerEndpointUpdateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Any of the properties applicable in PUT request. Is either a
+ ServerEndpointUpdateParameters type or a IO[bytes] type. Default value is None.
+ :type parameters: ~azure.mgmt.storagesync.models.ServerEndpointUpdateParameters or IO[bytes]
:return: An instance of LROPoller that returns either ServerEndpoint or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.ServerEndpoint]
@@ -794,16 +738,14 @@ def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.ServerEndpoint]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.ServerEndpoint] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._update_initial( # type: ignore
+ raw_result = self._update_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -816,6 +758,7 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -826,27 +769,27 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
+ deserialized = self._deserialize("ServerEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.ServerEndpoint].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return LROPoller[_models.ServerEndpoint](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def get(
@@ -868,12 +811,11 @@ def get(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: ServerEndpoint or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.ServerEndpoint
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -884,27 +826,24 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.ServerEndpoint]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.ServerEndpoint] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
server_endpoint_name=server_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -920,24 +859,22 @@ def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("ServerEndpoint", pipeline_response)
+ deserialized = self._deserialize("ServerEndpoint", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
+ return deserialized # type: ignore
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
-
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -948,32 +885,34 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
server_endpoint_name=server_endpoint_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -992,10 +931,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -1017,14 +958,6 @@ def begin_delete(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1032,15 +965,13 @@ def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1051,28 +982,27 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_by_sync_group(
@@ -1087,7 +1017,6 @@ def list_by_sync_group(
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ServerEndpoint or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.ServerEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1095,12 +1024,10 @@ def list_by_sync_group(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.ServerEndpointArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.ServerEndpointArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1111,18 +1038,16 @@ def list_by_sync_group(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_sync_group_request(
+ _request = build_list_by_sync_group_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_sync_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1134,26 +1059,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("ServerEndpointArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1166,18 +1091,16 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_sync_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints"} # type: ignore
-
- def _recall_action_initial( # pylint: disable=inconsistent-return-statements
+ def _recall_action_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Union[_models.RecallActionParameters, IO],
+ parameters: Union[_models.RecallActionParameters, IO[bytes]],
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1188,21 +1111,19 @@ def _recall_action_initial( # pylint: disable=inconsistent-return-statements
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "RecallActionParameters")
- request = build_recall_action_request(
+ _request = build_recall_action_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1212,20 +1133,24 @@ def _recall_action_initial( # pylint: disable=inconsistent-return-statements
content_type=content_type,
json=_json,
content=_content,
- template_url=self._recall_action_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -1244,10 +1169,12 @@ def _recall_action_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _recall_action_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}/recallAction"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_recall_action(
@@ -1277,14 +1204,6 @@ def begin_recall_action(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1297,7 +1216,7 @@ def begin_recall_action(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -1314,18 +1233,10 @@ def begin_recall_action(
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
:param parameters: Body of Recall Action object. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1338,7 +1249,7 @@ def begin_recall_action(
storage_sync_service_name: str,
sync_group_name: str,
server_endpoint_name: str,
- parameters: Union[_models.RecallActionParameters, IO],
+ parameters: Union[_models.RecallActionParameters, IO[bytes]],
**kwargs: Any
) -> LROPoller[None]:
"""Recall a server endpoint.
@@ -1352,19 +1263,9 @@ def begin_recall_action(
:type sync_group_name: str
:param server_endpoint_name: Name of Server Endpoint object. Required.
:type server_endpoint_name: str
- :param parameters: Body of Recall Action object. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.RecallActionParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Body of Recall Action object. Is either a RecallActionParameters type or a
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.RecallActionParameters or IO[bytes]
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1372,16 +1273,14 @@ def begin_recall_action(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._recall_action_initial( # type: ignore
+ raw_result = self._recall_action_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -1394,25 +1293,24 @@ def begin_recall_action(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_recall_action.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}/serverEndpoints/{serverEndpointName}/recallAction"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_storage_sync_services_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_storage_sync_services_operations.py
index edbdfd1bb004..4b7baa85f253 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_storage_sync_services_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_storage_sync_services_operations.py
@@ -6,8 +6,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,12 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -47,8 +48,8 @@ def build_check_name_availability_request(location_name: str, subscription_id: s
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -58,10 +59,10 @@ def build_check_name_availability_request(location_name: str, subscription_id: s
) # pylint: disable=line-too-long
path_format_arguments = {
"locationName": _SERIALIZER.url("location_name", location_name, "str"),
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -80,8 +81,8 @@ def build_create_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -90,14 +91,14 @@ def build_create_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -116,7 +117,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -125,14 +126,14 @@ def build_get_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -149,8 +150,8 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -159,14 +160,14 @@ def build_update_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -185,7 +186,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -194,14 +195,14 @@ def build_delete_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -216,7 +217,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -225,13 +226,13 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -246,7 +247,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -254,10 +255,10 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.StorageSync/storageSyncServices"
)
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -305,7 +306,6 @@ def check_name_availability(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
@@ -313,18 +313,17 @@ def check_name_availability(
@overload
def check_name_availability(
- self, location_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
+ self, location_name: str, parameters: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> _models.CheckNameAvailabilityResult:
"""Check the give namespace name availability.
:param location_name: The desired region for the name check. Required.
:type location_name: str
:param parameters: Parameters to check availability of the given namespace name. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
@@ -332,24 +331,20 @@ def check_name_availability(
@distributed_trace
def check_name_availability(
- self, location_name: str, parameters: Union[_models.CheckNameAvailabilityParameters, IO], **kwargs: Any
+ self, location_name: str, parameters: Union[_models.CheckNameAvailabilityParameters, IO[bytes]], **kwargs: Any
) -> _models.CheckNameAvailabilityResult:
"""Check the give namespace name availability.
:param location_name: The desired region for the name check. Required.
:type location_name: str
:param parameters: Parameters to check availability of the given namespace name. Is either a
- model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.CheckNameAvailabilityParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ CheckNameAvailabilityParameters type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.CheckNameAvailabilityParameters or IO[bytes]
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -360,61 +355,57 @@ def check_name_availability(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.CheckNameAvailabilityResult]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.CheckNameAvailabilityResult] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "CheckNameAvailabilityParameters")
- request = build_check_name_availability_request(
+ _request = build_check_name_availability_request(
location_name=location_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
- template_url=self.check_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("CheckNameAvailabilityResult", pipeline_response)
+ deserialized = self._deserialize("CheckNameAvailabilityResult", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- check_name_availability.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StorageSync/locations/{locationName}/checkNameAvailability"} # type: ignore
+ return deserialized # type: ignore
def _create_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Union[_models.StorageSyncServiceCreateParameters, IO],
+ parameters: Union[_models.StorageSyncServiceCreateParameters, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.StorageSyncService]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -425,21 +416,19 @@ def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.StorageSyncService]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "StorageSyncServiceCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
@@ -447,29 +436,29 @@ def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -481,12 +470,12 @@ def _create_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_create(
@@ -510,14 +499,6 @@ def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -529,7 +510,7 @@ def begin_create(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -542,18 +523,10 @@ def begin_create(
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param parameters: Storage Sync Service resource name. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -565,7 +538,7 @@ def begin_create(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Union[_models.StorageSyncServiceCreateParameters, IO],
+ parameters: Union[_models.StorageSyncServiceCreateParameters, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.StorageSyncService]:
"""Create a new StorageSyncService.
@@ -575,20 +548,10 @@ def begin_create(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :param parameters: Storage Sync Service resource name. Is either a model type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.storagesync.models.StorageSyncServiceCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Storage Sync Service resource name. Is either a
+ StorageSyncServiceCreateParameters type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.StorageSyncServiceCreateParameters or
+ IO[bytes]
:return: An instance of LROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -597,16 +560,14 @@ def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncService]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.StorageSyncService] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._create_initial( # type: ignore
+ raw_result = self._create_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
parameters=parameters,
@@ -617,30 +578,31 @@ def begin_create(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
+ deserialized = self._deserialize("StorageSyncService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.StorageSyncService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return LROPoller[_models.StorageSyncService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def get(
@@ -653,12 +615,11 @@ def get(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageSyncService or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.StorageSyncService
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -669,25 +630,22 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncService]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.StorageSyncService] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -703,23 +661,21 @@ def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
+ deserialized = self._deserialize("StorageSyncService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return deserialized # type: ignore
def _update_initial(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Optional[Union[_models.StorageSyncServiceUpdateParameters, IO]] = None,
+ parameters: Optional[Union[_models.StorageSyncServiceUpdateParameters, IO[bytes]]] = None,
**kwargs: Any
- ) -> Optional[_models.StorageSyncService]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -730,16 +686,14 @@ def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.StorageSyncService]]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
if parameters is not None:
@@ -747,7 +701,7 @@ def _update_initial(
else:
_json = None
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
@@ -755,25 +709,28 @@ def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
if response.status_code == 200:
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
@@ -781,8 +738,6 @@ def _update_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
-
if response.status_code == 202:
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
@@ -794,12 +749,12 @@ def _update_initial(
"str", response.headers.get("x-ms-correlation-request-id")
)
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
- return deserialized
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return deserialized # type: ignore
@overload
def begin_update(
@@ -823,14 +778,6 @@ def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -842,7 +789,7 @@ def begin_update(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Optional[IO] = None,
+ parameters: Optional[IO[bytes]] = None,
*,
content_type: str = "application/json",
**kwargs: Any
@@ -855,18 +802,10 @@ def begin_update(
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
:param parameters: Storage Sync Service resource. Default value is None.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -878,7 +817,7 @@ def begin_update(
self,
resource_group_name: str,
storage_sync_service_name: str,
- parameters: Optional[Union[_models.StorageSyncServiceUpdateParameters, IO]] = None,
+ parameters: Optional[Union[_models.StorageSyncServiceUpdateParameters, IO[bytes]]] = None,
**kwargs: Any
) -> LROPoller[_models.StorageSyncService]:
"""Patch a given StorageSyncService.
@@ -888,20 +827,10 @@ def begin_update(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :param parameters: Storage Sync Service resource. Is either a model type or a IO type. Default
- value is None.
- :type parameters: ~azure.mgmt.storagesync.models.StorageSyncServiceUpdateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Storage Sync Service resource. Is either a
+ StorageSyncServiceUpdateParameters type or a IO[bytes] type. Default value is None.
+ :type parameters: ~azure.mgmt.storagesync.models.StorageSyncServiceUpdateParameters or
+ IO[bytes]
:return: An instance of LROPoller that returns either StorageSyncService or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagesync.models.StorageSyncService]
@@ -910,16 +839,14 @@ def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncService]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.StorageSyncService] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._update_initial( # type: ignore
+ raw_result = self._update_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
parameters=parameters,
@@ -930,6 +857,7 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
@@ -940,32 +868,32 @@ def get_long_running_output(pipeline_response):
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("StorageSyncService", pipeline_response)
+ deserialized = self._deserialize("StorageSyncService", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.StorageSyncService].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return LROPoller[_models.StorageSyncService](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self, resource_group_name: str, storage_sync_service_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -976,30 +904,32 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.StorageSyncError, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -1022,10 +952,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("x-ms-correlation-request-id")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(self, resource_group_name: str, storage_sync_service_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -1036,14 +968,6 @@ def begin_delete(self, resource_group_name: str, storage_sync_service_name: str,
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1051,15 +975,13 @@ def begin_delete(self, resource_group_name: str, storage_sync_service_name: str,
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
- polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
- cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
api_version=api_version,
@@ -1068,28 +990,27 @@ def begin_delete(self, resource_group_name: str, storage_sync_service_name: str,
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
- polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
+ polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-
- begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}"} # type: ignore
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.StorageSyncService"]:
@@ -1098,7 +1019,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageSyncService or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.StorageSyncService]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1106,12 +1026,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncServiceArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.StorageSyncServiceArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1122,16 +1040,14 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1143,26 +1059,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("StorageSyncServiceArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1175,13 +1091,10 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices"} # type: ignore
-
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.StorageSyncService"]:
"""Get a StorageSyncService list by subscription.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageSyncService or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.StorageSyncService]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -1189,12 +1102,10 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.StorageSyncSe
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageSyncServiceArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.StorageSyncServiceArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1205,15 +1116,13 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.StorageSyncSe
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -1225,26 +1134,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("StorageSyncServiceArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1256,5 +1165,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StorageSync/storageSyncServices"} # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_sync_groups_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_sync_groups_operations.py
index ec47a835e05b..4169d4f3c5c4 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_sync_groups_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_sync_groups_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+from io import IOBase
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
@@ -20,20 +20,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -41,13 +39,13 @@
_SERIALIZER.client_side_validation = False
-def build_list_by_storage_sync_service_request(
+def build_list_by_storage_sync_service_request( # pylint: disable=name-too-long
resource_group_name: str, storage_sync_service_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -56,14 +54,14 @@ def build_list_by_storage_sync_service_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -80,8 +78,8 @@ def build_create_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -90,7 +88,7 @@ def build_create_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -98,7 +96,7 @@ def build_create_request(
"syncGroupName": _SERIALIZER.url("sync_group_name", sync_group_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -117,7 +115,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -126,7 +124,7 @@ def build_get_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -134,7 +132,7 @@ def build_get_request(
"syncGroupName": _SERIALIZER.url("sync_group_name", sync_group_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -151,7 +149,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -160,7 +158,7 @@ def build_delete_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -168,7 +166,7 @@ def build_delete_request(
"syncGroupName": _SERIALIZER.url("sync_group_name", sync_group_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -209,7 +207,6 @@ def list_by_storage_sync_service(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SyncGroup or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.SyncGroup]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -217,12 +214,10 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.SyncGroupArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.SyncGroupArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -233,17 +228,15 @@ def list_by_storage_sync_service(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -255,26 +248,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("SyncGroupArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -287,8 +280,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups"} # type: ignore
-
@overload
def create(
self,
@@ -314,7 +305,6 @@ def create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: SyncGroup or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.SyncGroup
:raises ~azure.core.exceptions.HttpResponseError:
@@ -326,7 +316,7 @@ def create(
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -341,11 +331,10 @@ def create(
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
:param parameters: Sync Group Body. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: SyncGroup or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.SyncGroup
:raises ~azure.core.exceptions.HttpResponseError:
@@ -357,7 +346,7 @@ def create(
resource_group_name: str,
storage_sync_service_name: str,
sync_group_name: str,
- parameters: Union[_models.SyncGroupCreateParameters, IO],
+ parameters: Union[_models.SyncGroupCreateParameters, IO[bytes]],
**kwargs: Any
) -> _models.SyncGroup:
"""Create a new SyncGroup.
@@ -369,17 +358,14 @@ def create(
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :param parameters: Sync Group Body. Is either a model type or a IO type. Required.
- :type parameters: ~azure.mgmt.storagesync.models.SyncGroupCreateParameters or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
+ :param parameters: Sync Group Body. Is either a SyncGroupCreateParameters type or a IO[bytes]
+ type. Required.
+ :type parameters: ~azure.mgmt.storagesync.models.SyncGroupCreateParameters or IO[bytes]
:return: SyncGroup or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.SyncGroup
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -390,21 +376,19 @@ def create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.SyncGroup]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.SyncGroup] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
- if isinstance(parameters, (IO, bytes)):
+ if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "SyncGroupCreateParameters")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
@@ -413,15 +397,14 @@ def create(
content_type=content_type,
json=_json,
content=_content,
- template_url=self.create.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -437,14 +420,12 @@ def create(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("SyncGroup", pipeline_response)
+ deserialized = self._deserialize("SyncGroup", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace
def get(
@@ -459,12 +440,11 @@ def get(
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: SyncGroup or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.SyncGroup
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -475,26 +455,23 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.SyncGroup]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.SyncGroup] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -510,14 +487,12 @@ def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("SyncGroup", pipeline_response)
+ deserialized = self._deserialize("SyncGroup", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- return deserialized
-
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
@@ -532,12 +507,11 @@ def delete( # pylint: disable=inconsistent-return-statements
:type storage_sync_service_name: str
:param sync_group_name: Name of Sync Group resource. Required.
:type sync_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -548,26 +522,23 @@ def delete( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
sync_group_name=sync_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -585,6 +556,4 @@ def delete( # pylint: disable=inconsistent-return-statements
)
if cls:
- return cls(pipeline_response, None, response_headers)
-
- delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/syncGroups/{syncGroupName}"} # type: ignore
+ return cls(pipeline_response, None, response_headers) # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_workflows_operations.py b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_workflows_operations.py
index 9f22e3b34b69..917e83d86fb1 100644
--- a/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_workflows_operations.py
+++ b/sdk/storage/azure-mgmt-storagesync/azure/mgmt/storagesync/operations/_workflows_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -20,20 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import MicrosoftStorageSyncMixinABC, _convert_request, _format_url_section
-if sys.version_info >= (3, 8):
- from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
else:
- from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -41,13 +38,13 @@
_SERIALIZER.client_side_validation = False
-def build_list_by_storage_sync_service_request(
+def build_list_by_storage_sync_service_request( # pylint: disable=name-too-long
resource_group_name: str, storage_sync_service_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -56,14 +53,14 @@ def build_list_by_storage_sync_service_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"storageSyncServiceName": _SERIALIZER.url("storage_sync_service_name", storage_sync_service_name, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -80,7 +77,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -89,7 +86,7 @@ def build_get_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows/{workflowId}",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -97,7 +94,7 @@ def build_get_request(
"workflowId": _SERIALIZER.url("workflow_id", workflow_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -114,7 +111,7 @@ def build_abort_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-06-01")) # type: Literal["2022-06-01"]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -123,7 +120,7 @@ def build_abort_request(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows/{workflowId}/abort",
) # pylint: disable=line-too-long
path_format_arguments = {
- "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
@@ -131,7 +128,7 @@ def build_abort_request(
"workflowId": _SERIALIZER.url("workflow_id", workflow_id, "str"),
}
- _url = _format_url_section(_url, **path_format_arguments)
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -172,7 +169,6 @@ def list_by_storage_sync_service(
:type resource_group_name: str
:param storage_sync_service_name: Name of Storage Sync Service resource. Required.
:type storage_sync_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Workflow or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagesync.models.Workflow]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -180,12 +176,10 @@ def list_by_storage_sync_service(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkflowArray]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.WorkflowArray] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -196,17 +190,15 @@ def list_by_storage_sync_service(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_storage_sync_service_request(
+ _request = build_list_by_storage_sync_service_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_storage_sync_service.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
else:
# make call to next link with the client's api-version
@@ -218,26 +210,26 @@ def prepare_request(next_link=None):
}
)
_next_request_params["api-version"] = self._config.api_version
- request = HttpRequest(
+ _request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
- request.method = "GET"
- return request
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("WorkflowArray", pipeline_response)
list_of_elem = deserialized.value
if cls:
- list_of_elem = cls(list_of_elem)
+ list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -250,8 +242,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_storage_sync_service.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows"} # type: ignore
-
@distributed_trace
def get(
self, resource_group_name: str, storage_sync_service_name: str, workflow_id: str, **kwargs: Any
@@ -265,12 +255,11 @@ def get(
:type storage_sync_service_name: str
:param workflow_id: workflow Id. Required.
:type workflow_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Workflow or the result of cls(response)
:rtype: ~azure.mgmt.storagesync.models.Workflow
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -281,26 +270,23 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[_models.Workflow]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.Workflow] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
workflow_id=workflow_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -316,14 +302,12 @@ def get(
"str", response.headers.get("x-ms-correlation-request-id")
)
- deserialized = self._deserialize("Workflow", pipeline_response)
+ deserialized = self._deserialize("Workflow", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
- get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows/{workflowId}"} # type: ignore
+ return deserialized # type: ignore
@distributed_trace
def abort( # pylint: disable=inconsistent-return-statements
@@ -338,12 +322,11 @@ def abort( # pylint: disable=inconsistent-return-statements
:type storage_sync_service_name: str
:param workflow_id: workflow Id. Required.
:type workflow_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -354,26 +337,23 @@ def abort( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version = kwargs.pop(
- "api_version", _params.pop("api-version", self._config.api_version)
- ) # type: Literal["2022-06-01"]
- cls = kwargs.pop("cls", None) # type: ClsType[None]
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
- request = build_abort_request(
+ _request = build_abort_request(
resource_group_name=resource_group_name,
storage_sync_service_name=storage_sync_service_name,
workflow_id=workflow_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.abort.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url) # type: ignore
+ _request.url = self._client.format_url(_request.url)
- pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
- request, stream=False, **kwargs
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -390,6 +370,4 @@ def abort( # pylint: disable=inconsistent-return-statements
)
if cls:
- return cls(pipeline_response, None, response_headers)
-
- abort.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows/{workflowId}/abort"} # type: ignore
+ return cls(pipeline_response, None, response_headers) # type: ignore
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_afs_share_metadata_certificate_public_keys.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_afs_share_metadata_certificate_public_keys.py
index c7496e287cd7..78aa79b0fd7b 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_afs_share_metadata_certificate_public_keys.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_afs_share_metadata_certificate_public_keys.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_AfsShareMetadataCertificatePublicKeys.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_AfsShareMetadataCertificatePublicKeys.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_create.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_create.py
index 70ea3047e6eb..ad96fb53f6bc 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_create.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_create.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -46,6 +47,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_Create.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_Create.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_delete.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_delete.py
index 4bd336a383a1..70ce539133b4 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_delete.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.cloud_endpoints.begin_delete(
+ client.cloud_endpoints.begin_delete(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
cloud_endpoint_name="SampleCloudEndpoint_1",
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_Delete.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_get.py
index a7104a3e0f9d..1a416cca778c 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_get.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_Get.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_list_by_sync_group.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_list_by_sync_group.py
index eadc980391c4..91a23e8c5608 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_list_by_sync_group.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_list_by_sync_group.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -38,6 +39,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_ListBySyncGroup.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_ListBySyncGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_post_backup.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_post_backup.py
index ea824b2a8479..036901be05ee 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_post_backup.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_post_backup.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -39,6 +40,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_PostBackup.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_PostBackup.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_post_restore.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_post_restore.py
index cfc3e329bac6..2470bf75acd4 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_post_restore.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_post_restore.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,7 +30,7 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.cloud_endpoints.begin_post_restore(
+ client.cloud_endpoints.begin_post_restore(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
@@ -46,9 +47,8 @@ def main():
"status": "Succeeded",
},
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_PostRestore.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_PostRestore.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_pre_backup.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_pre_backup.py
index 3d7428a52285..916a623ae1e3 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_pre_backup.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_pre_backup.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,16 +30,15 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.cloud_endpoints.begin_pre_backup(
+ client.cloud_endpoints.begin_pre_backup(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
cloud_endpoint_name="SampleCloudEndpoint_1",
parameters={"azureFileShare": "https://sampleserver.file.core.test-cint.azure-test.net/sampleFileShare"},
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_PreBackup.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_PreBackup.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_pre_restore.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_pre_restore.py
index 0d4879b72969..a4ff610969fd 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_pre_restore.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_pre_restore.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,7 +30,7 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.cloud_endpoints.begin_pre_restore(
+ client.cloud_endpoints.begin_pre_restore(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
@@ -44,9 +45,8 @@ def main():
],
},
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_PreRestore.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_PreRestore.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_restoreheartbeat.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_restore_heatbeat.py
similarity index 89%
rename from sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_restoreheartbeat.py
rename to sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_restore_heatbeat.py
index b9ae85097b34..8a633cd03f07 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_restoreheartbeat.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_restore_heatbeat.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -14,7 +15,7 @@
pip install azure-identity
pip install azure-mgmt-storagesync
# USAGE
- python cloud_endpoints_restoreheartbeat.py
+ python cloud_endpoints_restore_heatbeat.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -29,15 +30,14 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.cloud_endpoints.restoreheartbeat(
+ client.cloud_endpoints.restoreheartbeat(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
cloud_endpoint_name="SampleCloudEndpoint_1",
)
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_RestoreHeatbeat.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_RestoreHeatbeat.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_trigger_change_detection.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_trigger_change_detection.py
index afa556a50068..b6ca3124e42f 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_trigger_change_detection.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/cloud_endpoints_trigger_change_detection.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,16 +30,15 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.cloud_endpoints.begin_trigger_change_detection(
+ client.cloud_endpoints.begin_trigger_change_detection(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
cloud_endpoint_name="SampleCloudEndpoint_1",
parameters={"changeDetectionMode": "Recursive", "directoryPath": "NewDirectory"},
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/CloudEndpoints_TriggerChangeDetection.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/CloudEndpoints_TriggerChangeDetection.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/location_operation_status_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/location_operation_status_get.py
new file mode 100644
index 000000000000..8cb299073c98
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/location_operation_status_get.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-storagesync
+# USAGE
+ python location_operation_status_get.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = MicrosoftStorageSync(
+ credential=DefaultAzureCredential(),
+ subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
+ )
+
+ response = client.location_operation_status(
+ location_name="westus",
+ operation_id="eyJwYXJ0aXRpb25JZCI6ImE1ZDNiMDU4LTYwN2MtNDI0Ny05Y2FmLWJlZmU4NGQ0ZDU0NyIsIndvcmtmbG93SWQiOiJjYzg1MTY2YS0xMjI2LTQ4MGYtYWM5ZC1jMmRhNTVmY2M2ODYiLCJ3b3JrZmxvd09wZXJhdGlvbklkIjoiOTdmODU5ZTAtOGY1MC00ZTg4LWJkZDEtNWZlYzgwYTVlYzM0tui=",
+ )
+ print(response)
+
+
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/LocationOperationStatus_Get.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/operation_status_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/operation_status_get.py
new file mode 100644
index 000000000000..9a067d4de7d5
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/operation_status_get.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-storagesync
+# USAGE
+ python operation_status_get.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = MicrosoftStorageSync(
+ credential=DefaultAzureCredential(),
+ subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
+ )
+
+ response = client.operation_status.get(
+ resource_group_name="SampleResourceGroup_1",
+ location_name="westus",
+ workflow_id="828219ea-083e-48b5-89ea-8fd9991b2e75",
+ operation_id="14b50e24-f68d-4b29-a882-38be9dfb8bd1",
+ )
+ print(response)
+
+
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/OperationStatus_Get.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/operations_list.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/operations_list.py
index 6235595e7d98..000b10217158 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/operations_list.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/operations_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -34,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/Operations_List.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/Operations_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_create.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_create.py
index 9c1b9d6dd988..5d2fc14120b6 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_create.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_create.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -26,7 +27,7 @@
def main():
client = MicrosoftStorageSync(
credential=DefaultAzureCredential(),
- subscription_id="{subscription-id}",
+ subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
response = client.private_endpoint_connections.begin_create(
@@ -40,6 +41,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/PrivateEndpointConnections_Create.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/PrivateEndpointConnections_Create.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_delete.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_delete.py
index 77e8361ef7a4..88eeeeb96d21 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_delete.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -26,17 +27,16 @@
def main():
client = MicrosoftStorageSync(
credential=DefaultAzureCredential(),
- subscription_id="{subscription-id}",
+ subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.private_endpoint_connections.begin_delete(
+ client.private_endpoint_connections.begin_delete(
resource_group_name="res6977",
storage_sync_service_name="sss2527",
private_endpoint_connection_name="{privateEndpointConnectionName}",
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/PrivateEndpointConnections_Delete.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/PrivateEndpointConnections_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_get.py
index 59fb9102f15f..ea26ed9e8aad 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_get.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -26,7 +27,7 @@
def main():
client = MicrosoftStorageSync(
credential=DefaultAzureCredential(),
- subscription_id="{subscription-id}",
+ subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
response = client.private_endpoint_connections.get(
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/PrivateEndpointConnections_Get.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/PrivateEndpointConnections_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_list_by_storage_sync_service.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_list_by_storage_sync_service.py
index 7ffacaf9b93b..7d695e62e263 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_list_by_storage_sync_service.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_endpoint_connections_list_by_storage_sync_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -26,7 +27,7 @@
def main():
client = MicrosoftStorageSync(
credential=DefaultAzureCredential(),
- subscription_id="{subscription-id}",
+ subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
response = client.private_endpoint_connections.list_by_storage_sync_service(
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/PrivateEndpointConnections_ListByStorageSyncService.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/PrivateEndpointConnections_ListByStorageSyncService.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_link_resources_list.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_link_resources_list.py
index 54ed34f760aa..4d9597b2e1a1 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/private_link_resources_list.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/private_link_resources_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -26,7 +27,7 @@
def main():
client = MicrosoftStorageSync(
credential=DefaultAzureCredential(),
- subscription_id="{subscription-id}",
+ subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
response = client.private_link_resources.list_by_storage_sync_service(
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/PrivateLinkResources_List.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/PrivateLinkResources_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_create.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_create.py
index eba76eaf65a0..b962e4659302 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_create.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_create.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -36,7 +37,9 @@ def main():
parameters={
"properties": {
"agentVersion": "1.0.277.0",
+ "applicationId": "00000000-0000-0000-0000-000000000001",
"friendlyName": "afscv-2304-139",
+ "identity": True,
"serverCertificate": "MIIDFjCCAf6gAwIBAgIQQS+DS8uhc4VNzUkTw7wbRjANBgkqhkiG9w0BAQ0FADAzMTEwLwYDVQQDEyhhbmt1c2hiLXByb2QzLnJlZG1vbmQuY29ycC5taWNyb3NvZnQuY29tMB4XDTE3MDgwMzE3MDQyNFoXDTE4MDgwNDE3MDQyNFowMzExMC8GA1UEAxMoYW5rdXNoYi1wcm9kMy5yZWRtb25kLmNvcnAubWljcm9zb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALDRvV4gmsIy6jGDPiHsXmvgVP749NNP7DopdlbHaNhjFmYINHl0uWylyaZmgJrROt2mnxN/zEyJtGnqYHlzUr4xvGq/qV5pqgdB9tag/sw9i22gfe9PRZ0FmSOZnXMbLYgLiDFqLtut5gHcOuWMj03YnkfoBEKlFBxWbagvW2yxz/Sxi9OVSJOKCaXra0RpcIHrO/KFl6ho2eE1/7Ykmfa8hZvSdoPd5gHdLiQcMB/pxq+mWp1fI6c8vFZoDu7Atn+NXTzYPKUxKzaisF12TsaKpohUsJpbB3Wocb0F5frn614D2pg14ERB5otjAMWw1m65csQWPI6dP8KIYe0+QPkCAwEAAaMmMCQwIgYDVR0lAQH/BBgwFgYIKwYBBQUHAwIGCisGAQQBgjcKAwwwDQYJKoZIhvcNAQENBQADggEBAA4RhVIBkw34M1RwakJgHvtjsOFxF1tVQA941NtLokx1l2Z8+GFQkcG4xpZSt+UN6wLerdCbnNhtkCErWUDeaT0jxk4g71Ofex7iM04crT4iHJr8mi96/XnhnkTUs+GDk12VgdeeNEczMZz+8Mxw9dJ5NCnYgTwO0SzGlclRsDvjzkLo8rh2ZG6n/jKrEyNXXo+hOqhupij0QbRP2Tvexdfw201kgN1jdZify8XzJ8Oi0bTS0KpJf2pNPOlooK2bjMUei9ANtEdXwwfVZGWvVh6tJjdv6k14wWWJ1L7zhA1IIVb1J+sQUzJji5iX0DrezjTz1Fg+gAzITaA/WsuujlM=",
"serverId": "080d4133-bdb5-40a0-96a0-71a6057bfe9a",
"serverOSVersion": "10.0.14393.0",
@@ -47,6 +50,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/RegisteredServers_Create.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/RegisteredServers_Create.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_delete.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_delete.py
index 433735ddd9ff..bcf396c460fb 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_delete.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,14 +30,13 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.registered_servers.begin_delete(
+ client.registered_servers.begin_delete(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
server_id="41166691-ab03-43e9-ab3e-0330eda162ac",
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/RegisteredServers_Delete.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/RegisteredServers_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_get.py
index d027041dea61..d0d3b4e40b58 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_get.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/RegisteredServers_Get.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/RegisteredServers_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_list_by_storage_sync_service.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_list_by_storage_sync_service.py
index 7ef39b2e1b85..16e0716a457b 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_list_by_storage_sync_service.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_list_by_storage_sync_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/RegisteredServers_ListByStorageSyncService.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/RegisteredServers_ListByStorageSyncService.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_trigger_rollover.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_trigger_rollover.py
index 832dc4616f94..d43ef553caea 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_trigger_rollover.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_trigger_rollover.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,7 +30,7 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.registered_servers.begin_trigger_rollover(
+ client.registered_servers.begin_trigger_rollover(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
server_id="d166ca76-dad2-49df-b409-12345642d730",
@@ -37,9 +38,8 @@ def main():
"serverCertificate": '"MIIDFjCCAf6gAwIBAgIQQS+DS8uhc4VNzUkTw7wbRjANBgkqhkiG9w0BAQ0FADAzMTEwLwYDVQQDEyhhbmt1c2hiLXByb2QzLnJlZG1vbmQuY29ycC5taWNyb3NvZnQuY29tMB4XDTE3MDgwMzE3MDQyNFoXDTE4MDgwNDE3MDQyNFowMzExMC8GA1UEAxMoYW5rdXNoYi1wcm9kMy5yZWRtb25kLmNvcnAubWljcm9zb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALDRvV4gmsIy6jGDPiHsXmvgVP749NNP7DopdlbHaNhjFmYINHl0uWylyaZmgJrROt2mnxN/zEyJtGnqYHlzUr4xvGq/qV5pqgdB9tag/sw9i22gfe9PRZ0FmSOZnXMbLYgLiDFqLtut5gHcOuWMj03YnkfoBEKlFBxWbagvW2yxz/Sxi9OVSJOKCaXra0RpcIHrO/KFl6ho2eE1/7Ykmfa8hZvSdoPd5gHdLiQcMB/pxq+mWp1fI6c8vFZoDu7Atn+NXTzYPKUxKzaisF12TsaKpohUsJpbB3Wocb0F5frn614D2pg14ERB5otjAMWw1m65csQWPI6dP8KIYe0+QPkCAwEAAaMmMCQwIgYDVR0lAQH/BBgwFgYIKwYBBQUHAwIGCisGAQQBgjcKAwwwDQYJKoZIhvcNAQENBQADggEBAA4RhVIBkw34M1RwakJgHvtjsOFxF1tVQA941NtLokx1l2Z8+GFQkcG4xpZSt+UN6wLerdCbnNhtkCErWUDeaT0jxk4g71Ofex7iM04crT4iHJr8mi96/XnhnkTUs+GDk12VgdeeNEczMZz+8Mxw9dJ5NCnYgTwO0SzGlclRsDvjzkLo8rh2ZG6n/jKrEyNXXo+hOqhupij0QbRP2Tvexdfw201kgN1jdZify8XzJ8Oi0bTS0KpJf2pNPOlooK2bjMUei9ANtEdXwwfVZGWvVh6tJjdv6k14wWWJ1L7zhA1IIVb1J+sQUzJji5iX0DrezjTz1Fg+gAzITaA/WsuujlM="'
},
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/RegisteredServers_TriggerRollover.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/RegisteredServers_TriggerRollover.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_update.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_update.py
new file mode 100644
index 000000000000..df13c5fe68a2
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/registered_servers_update.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-storagesync
+# USAGE
+ python registered_servers_update.py
+
+ Before run the sample, please set the values of the client ID, tenant ID and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = MicrosoftStorageSync(
+ credential=DefaultAzureCredential(),
+ subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
+ )
+
+ response = client.registered_servers.begin_update(
+ resource_group_name="SampleResourceGroup_1",
+ storage_sync_service_name="SampleStorageSyncService_1",
+ server_id="080d4133-bdb5-40a0-96a0-71a6057bfe9a",
+ parameters={"properties": {"applicationId": "120d4132-bcd5-40a0-96a0-71a6057ebf0c", "identity": True}},
+ ).result()
+ print(response)
+
+
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/RegisteredServers_Update.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_create.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_create.py
index 61ef339f0203..b5d8bdad99b7 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_create.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_create.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -52,6 +53,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/ServerEndpoints_Create.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/ServerEndpoints_Create.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_delete.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_delete.py
index aa7ae07e0b19..f47ac171848e 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_delete.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,15 +30,14 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.server_endpoints.begin_delete(
+ client.server_endpoints.begin_delete(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
server_endpoint_name="SampleServerEndpoint_1",
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/ServerEndpoints_Delete.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/ServerEndpoints_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_get.py
index 9f12022b3e3d..69377ba528df 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_get.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/ServerEndpoints_Get.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/ServerEndpoints_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_list_by_sync_group.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_list_by_sync_group.py
index 14ba04f86220..515fe6c87134 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_list_by_sync_group.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_list_by_sync_group.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -38,6 +39,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/ServerEndpoints_ListBySyncGroup.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/ServerEndpoints_ListBySyncGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_recall_action.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_recall.py
similarity index 89%
rename from sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_recall_action.py
rename to sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_recall.py
index 7561bb437998..69a6617a9285 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_recall_action.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_recall.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -14,7 +15,7 @@
pip install azure-identity
pip install azure-mgmt-storagesync
# USAGE
- python server_endpoints_recall_action.py
+ python server_endpoints_recall.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -29,16 +30,15 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.server_endpoints.begin_recall_action(
+ client.server_endpoints.begin_recall_action(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
server_endpoint_name="SampleServerEndpoint_1",
parameters={"pattern": "", "recallPath": ""},
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/ServerEndpoints_Recall.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/ServerEndpoints_Recall.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_update.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_update.py
index 81227e616719..77424fa4b2d9 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_update.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/server_endpoints_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/ServerEndpoints_Update.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/ServerEndpoints_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_service_check_name_availability_already_exists.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_service_check_name_availability_already_exists.py
index 4d6e7b3751f6..9be4eb462c5e 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_service_check_name_availability_already_exists.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_service_check_name_availability_already_exists.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/StorageSyncServiceCheckNameAvailability_AlreadyExists.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/StorageSyncServiceCheckNameAvailability_AlreadyExists.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_service_check_name_availability_available.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_service_check_name_availability_available.py
index f48c2585e039..655ba82e6f2b 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_service_check_name_availability_available.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_service_check_name_availability_available.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/StorageSyncServiceCheckNameAvailability_Available.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/StorageSyncServiceCheckNameAvailability_Available.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_create.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_create.py
index ca6f5dbbcee5..bce7ed38688d 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_create.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_create.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -32,11 +33,16 @@ def main():
response = client.storage_sync_services.begin_create(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
- parameters={"location": "WestUS", "properties": {"incomingTrafficPolicy": "AllowAllTraffic"}, "tags": {}},
+ parameters={
+ "identity": {"type": "SystemAssigned, UserAssigned"},
+ "location": "WestUS",
+ "properties": {"incomingTrafficPolicy": "AllowAllTraffic"},
+ "tags": {},
+ },
).result()
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/StorageSyncServices_Create.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/StorageSyncServices_Create.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_delete.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_delete.py
index f2933cc806a3..0597362fb3c2 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_delete.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,13 +30,12 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.storage_sync_services.begin_delete(
+ client.storage_sync_services.begin_delete(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
).result()
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/StorageSyncServices_Delete.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/StorageSyncServices_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_get.py
index c5f39df02609..bd75622bef67 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_get.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/StorageSyncServices_Get.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/StorageSyncServices_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_list_by_resource_group.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_list_by_resource_group.py
index c63a43ade752..8e5cd33de631 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_list_by_resource_group.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_list_by_resource_group.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -36,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/StorageSyncServices_ListByResourceGroup.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/StorageSyncServices_ListByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_list_by_subscription.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_list_by_subscription.py
index 1999ca457276..11ff44155ab5 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_list_by_subscription.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_list_by_subscription.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -34,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/StorageSyncServices_ListBySubscription.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/StorageSyncServices_ListBySubscription.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_update.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_update.py
index 71cc10cb3171..df7a5cf2b0e8 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_update.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/storage_sync_services_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/StorageSyncServices_Update.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/StorageSyncServices_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_create.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_create.py
index 567957d228fb..3ede9e2fe120 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_create.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_create.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -38,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/SyncGroups_Create.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/SyncGroups_Create.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_delete.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_delete.py
index 15fc87b09e78..526d4638c1d6 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_delete.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,14 +30,13 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.sync_groups.delete(
+ client.sync_groups.delete(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
sync_group_name="SampleSyncGroup_1",
)
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/SyncGroups_Delete.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/SyncGroups_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_get.py
index ed8860ccbc25..34c81d1a7b96 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_get.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/SyncGroups_Get.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/SyncGroups_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_list_by_storage_sync_service.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_list_by_storage_sync_service.py
index 54b7dd51f282..8d631143b668 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_list_by_storage_sync_service.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/sync_groups_list_by_storage_sync_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/SyncGroups_ListByStorageSyncService.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/SyncGroups_ListByStorageSyncService.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_abort.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_abort.py
index ae40e2473f27..fd2cc4269510 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_abort.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_abort.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -29,14 +30,13 @@ def main():
subscription_id="52b8da2f-61e0-4a1f-8dde-336911f367fb",
)
- response = client.workflows.abort(
+ client.workflows.abort(
resource_group_name="SampleResourceGroup_1",
storage_sync_service_name="SampleStorageSyncService_1",
workflow_id="7ffd50b3-5574-478d-9ff2-9371bc42ce68",
)
- print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/Workflows_Abort.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/Workflows_Abort.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_get.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_get.py
index 16fb0572825c..a3678b4e61f6 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_get.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/Workflows_Get.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/Workflows_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_list_by_storage_sync_service.py b/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_list_by_storage_sync_service.py
index a67a55f62b8e..e60c60cef525 100644
--- a/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_list_by_storage_sync_service.py
+++ b/sdk/storage/azure-mgmt-storagesync/generated_samples/workflows_list_by_storage_sync_service.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.storagesync import MicrosoftStorageSync
"""
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-06-01/examples/Workflows_ListByStorageSyncService.json
+# x-ms-original-file: specification/storagesync/resource-manager/Microsoft.StorageSync/stable/2022-09-01/examples/Workflows_ListByStorageSyncService.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/conftest.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/conftest.py
new file mode 100644
index 000000000000..7819009d5314
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/conftest.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+ test_proxy,
+ add_general_regex_sanitizer,
+ add_body_key_sanitizer,
+ add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, please avoid record sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+ microsoftstoragesync_subscription_id = os.environ.get(
+ "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+ )
+ microsoftstoragesync_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+ microsoftstoragesync_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+ microsoftstoragesync_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(
+ regex=microsoftstoragesync_subscription_id, value="00000000-0000-0000-0000-000000000000"
+ )
+ add_general_regex_sanitizer(regex=microsoftstoragesync_tenant_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=microsoftstoragesync_client_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=microsoftstoragesync_client_secret, value="00000000-0000-0000-0000-000000000000")
+
+ add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+ add_header_regex_sanitizer(key="Cookie", value="cookie;")
+ add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync.py
new file mode 100644
index 000000000000..4202b4f76308
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_location_operation_status(self, resource_group):
+ response = self.client.location_operation_status(
+ location_name="str",
+ operation_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_async.py
new file mode 100644
index 000000000000..b1c84f7cb637
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_async.py
@@ -0,0 +1,32 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_location_operation_status(self, resource_group):
+ response = await self.client.location_operation_status(
+ location_name="str",
+ operation_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_cloud_endpoints_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_cloud_endpoints_operations.py
new file mode 100644
index 000000000000..ceda9373032a
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_cloud_endpoints_operations.py
@@ -0,0 +1,213 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncCloudEndpointsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_begin_create(self, resource_group):
+ response = self.client.cloud_endpoints.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={
+ "azureFileShareName": "str",
+ "friendlyName": "str",
+ "id": "str",
+ "name": "str",
+ "storageAccountResourceId": "str",
+ "storageAccountTenantId": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_get(self, resource_group):
+ response = self.client.cloud_endpoints.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_begin_delete(self, resource_group):
+ response = self.client.cloud_endpoints.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_list_by_sync_group(self, resource_group):
+ response = self.client.cloud_endpoints.list_by_sync_group(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_begin_pre_backup(self, resource_group):
+ response = self.client.cloud_endpoints.begin_pre_backup(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={"azureFileShare": "str"},
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_begin_post_backup(self, resource_group):
+ response = self.client.cloud_endpoints.begin_post_backup(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={"azureFileShare": "str"},
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_begin_pre_restore(self, resource_group):
+ response = self.client.cloud_endpoints.begin_pre_restore(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={
+ "azureFileShareUri": "str",
+ "backupMetadataPropertyBag": "str",
+ "partition": "str",
+ "pauseWaitForSyncDrainTimePeriodInSeconds": 0,
+ "replicaGroup": "str",
+ "requestId": "str",
+ "restoreFileSpec": [{"isdir": bool, "path": "str"}],
+ "sourceAzureFileShareUri": "str",
+ "status": "str",
+ },
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_restoreheartbeat(self, resource_group):
+ response = self.client.cloud_endpoints.restoreheartbeat(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_begin_post_restore(self, resource_group):
+ response = self.client.cloud_endpoints.begin_post_restore(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={
+ "azureFileShareUri": "str",
+ "failedFileList": "str",
+ "partition": "str",
+ "replicaGroup": "str",
+ "requestId": "str",
+ "restoreFileSpec": [{"isdir": bool, "path": "str"}],
+ "sourceAzureFileShareUri": "str",
+ "status": "str",
+ },
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_begin_trigger_change_detection(self, resource_group):
+ response = self.client.cloud_endpoints.begin_trigger_change_detection(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={"changeDetectionMode": "str", "directoryPath": "str", "paths": ["str"]},
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_cloud_endpoints_afs_share_metadata_certificate_public_keys(self, resource_group):
+ response = self.client.cloud_endpoints.afs_share_metadata_certificate_public_keys(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_cloud_endpoints_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_cloud_endpoints_operations_async.py
new file mode 100644
index 000000000000..b74a0f62245f
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_cloud_endpoints_operations_async.py
@@ -0,0 +1,228 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncCloudEndpointsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_begin_create(self, resource_group):
+ response = await (
+ await self.client.cloud_endpoints.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={
+ "azureFileShareName": "str",
+ "friendlyName": "str",
+ "id": "str",
+ "name": "str",
+ "storageAccountResourceId": "str",
+ "storageAccountTenantId": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_get(self, resource_group):
+ response = await self.client.cloud_endpoints.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_begin_delete(self, resource_group):
+ response = await (
+ await self.client.cloud_endpoints.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_list_by_sync_group(self, resource_group):
+ response = self.client.cloud_endpoints.list_by_sync_group(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_begin_pre_backup(self, resource_group):
+ response = await (
+ await self.client.cloud_endpoints.begin_pre_backup(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={"azureFileShare": "str"},
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_begin_post_backup(self, resource_group):
+ response = await (
+ await self.client.cloud_endpoints.begin_post_backup(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={"azureFileShare": "str"},
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_begin_pre_restore(self, resource_group):
+ response = await (
+ await self.client.cloud_endpoints.begin_pre_restore(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={
+ "azureFileShareUri": "str",
+ "backupMetadataPropertyBag": "str",
+ "partition": "str",
+ "pauseWaitForSyncDrainTimePeriodInSeconds": 0,
+ "replicaGroup": "str",
+ "requestId": "str",
+ "restoreFileSpec": [{"isdir": bool, "path": "str"}],
+ "sourceAzureFileShareUri": "str",
+ "status": "str",
+ },
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_restoreheartbeat(self, resource_group):
+ response = await self.client.cloud_endpoints.restoreheartbeat(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_begin_post_restore(self, resource_group):
+ response = await (
+ await self.client.cloud_endpoints.begin_post_restore(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={
+ "azureFileShareUri": "str",
+ "failedFileList": "str",
+ "partition": "str",
+ "replicaGroup": "str",
+ "requestId": "str",
+ "restoreFileSpec": [{"isdir": bool, "path": "str"}],
+ "sourceAzureFileShareUri": "str",
+ "status": "str",
+ },
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_begin_trigger_change_detection(self, resource_group):
+ response = await (
+ await self.client.cloud_endpoints.begin_trigger_change_detection(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ parameters={"changeDetectionMode": "str", "directoryPath": "str", "paths": ["str"]},
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_cloud_endpoints_afs_share_metadata_certificate_public_keys(self, resource_group):
+ response = await self.client.cloud_endpoints.afs_share_metadata_certificate_public_keys(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ cloud_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operation_status_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operation_status_operations.py
new file mode 100644
index 000000000000..743e22a89151
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operation_status_operations.py
@@ -0,0 +1,33 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncOperationStatusOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_operation_status_get(self, resource_group):
+ response = self.client.operation_status.get(
+ resource_group_name=resource_group.name,
+ location_name="str",
+ workflow_id="str",
+ operation_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operation_status_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operation_status_operations_async.py
new file mode 100644
index 000000000000..e7d8fb0bba8d
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operation_status_operations_async.py
@@ -0,0 +1,34 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncOperationStatusOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_operation_status_get(self, resource_group):
+ response = await self.client.operation_status.get(
+ resource_group_name=resource_group.name,
+ location_name="str",
+ workflow_id="str",
+ operation_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operations.py
new file mode 100644
index 000000000000..36b5db62f34d
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operations.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_operations_list(self, resource_group):
+ response = self.client.operations.list(
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operations_async.py
new file mode 100644
index 000000000000..1d9cc5761fd9
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_operations_async.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_operations_list(self, resource_group):
+ response = self.client.operations.list(
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_endpoint_connections_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_endpoint_connections_operations.py
new file mode 100644
index 000000000000..94494e57dabd
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_endpoint_connections_operations.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_endpoint_connections_get(self, resource_group):
+ response = self.client.private_endpoint_connections.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ private_endpoint_connection_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_endpoint_connections_begin_create(self, resource_group):
+ response = self.client.private_endpoint_connections.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ private_endpoint_connection_name="str",
+ properties={
+ "groupIds": ["str"],
+ "id": "str",
+ "name": "str",
+ "privateEndpoint": {"id": "str"},
+ "privateLinkServiceConnectionState": {"actionsRequired": "str", "description": "str", "status": "str"},
+ "provisioningState": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_endpoint_connections_begin_delete(self, resource_group):
+ response = self.client.private_endpoint_connections.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ private_endpoint_connection_name="str",
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_endpoint_connections_list_by_storage_sync_service(self, resource_group):
+ response = self.client.private_endpoint_connections.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_endpoint_connections_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_endpoint_connections_operations_async.py
new file mode 100644
index 000000000000..571ee2dec323
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_endpoint_connections_operations_async.py
@@ -0,0 +1,96 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_endpoint_connections_get(self, resource_group):
+ response = await self.client.private_endpoint_connections.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ private_endpoint_connection_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_endpoint_connections_begin_create(self, resource_group):
+ response = await (
+ await self.client.private_endpoint_connections.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ private_endpoint_connection_name="str",
+ properties={
+ "groupIds": ["str"],
+ "id": "str",
+ "name": "str",
+ "privateEndpoint": {"id": "str"},
+ "privateLinkServiceConnectionState": {
+ "actionsRequired": "str",
+ "description": "str",
+ "status": "str",
+ },
+ "provisioningState": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_endpoint_connections_begin_delete(self, resource_group):
+ response = await (
+ await self.client.private_endpoint_connections.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ private_endpoint_connection_name="str",
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_endpoint_connections_list_by_storage_sync_service(self, resource_group):
+ response = self.client.private_endpoint_connections.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_link_resources_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_link_resources_operations.py
new file mode 100644
index 000000000000..0dbdd9fb023f
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_link_resources_operations.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_link_resources_list_by_storage_sync_service(self, resource_group):
+ response = self.client.private_link_resources.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_link_resources_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_link_resources_operations_async.py
new file mode 100644
index 000000000000..1cefe4e7f833
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_private_link_resources_operations_async.py
@@ -0,0 +1,32 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_link_resources_list_by_storage_sync_service(self, resource_group):
+ response = await self.client.private_link_resources.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_registered_servers_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_registered_servers_operations.py
new file mode 100644
index 000000000000..428901da621a
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_registered_servers_operations.py
@@ -0,0 +1,136 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncRegisteredServersOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_registered_servers_list_by_storage_sync_service(self, resource_group):
+ response = self.client.registered_servers.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_registered_servers_get(self, resource_group):
+ response = self.client.registered_servers.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_registered_servers_begin_create(self, resource_group):
+ response = self.client.registered_servers.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ parameters={
+ "agentVersion": "str",
+ "applicationId": "str",
+ "clusterId": "str",
+ "clusterName": "str",
+ "friendlyName": "str",
+ "id": "str",
+ "identity": bool,
+ "lastHeartBeat": "str",
+ "name": "str",
+ "serverCertificate": "str",
+ "serverId": "str",
+ "serverOSVersion": "str",
+ "serverRole": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_registered_servers_begin_update(self, resource_group):
+ response = self.client.registered_servers.begin_update(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ parameters={
+ "applicationId": "str",
+ "id": "str",
+ "identity": bool,
+ "name": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_registered_servers_begin_delete(self, resource_group):
+ response = self.client.registered_servers.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_registered_servers_begin_trigger_rollover(self, resource_group):
+ response = self.client.registered_servers.begin_trigger_rollover(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ parameters={"serverCertificate": "str"},
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_registered_servers_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_registered_servers_operations_async.py
new file mode 100644
index 000000000000..9e6570205378
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_registered_servers_operations_async.py
@@ -0,0 +1,145 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncRegisteredServersOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_registered_servers_list_by_storage_sync_service(self, resource_group):
+ response = self.client.registered_servers.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_registered_servers_get(self, resource_group):
+ response = await self.client.registered_servers.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_registered_servers_begin_create(self, resource_group):
+ response = await (
+ await self.client.registered_servers.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ parameters={
+ "agentVersion": "str",
+ "applicationId": "str",
+ "clusterId": "str",
+ "clusterName": "str",
+ "friendlyName": "str",
+ "id": "str",
+ "identity": bool,
+ "lastHeartBeat": "str",
+ "name": "str",
+ "serverCertificate": "str",
+ "serverId": "str",
+ "serverOSVersion": "str",
+ "serverRole": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_registered_servers_begin_update(self, resource_group):
+ response = await (
+ await self.client.registered_servers.begin_update(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ parameters={
+ "applicationId": "str",
+ "id": "str",
+ "identity": bool,
+ "name": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_registered_servers_begin_delete(self, resource_group):
+ response = await (
+ await self.client.registered_servers.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_registered_servers_begin_trigger_rollover(self, resource_group):
+ response = await (
+ await self.client.registered_servers.begin_trigger_rollover(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ server_id="str",
+ parameters={"serverCertificate": "str"},
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_server_endpoints_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_server_endpoints_operations.py
new file mode 100644
index 000000000000..cb8d27fc75e8
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_server_endpoints_operations.py
@@ -0,0 +1,127 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncServerEndpointsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_server_endpoints_begin_create(self, resource_group):
+ response = self.client.server_endpoints.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ parameters={
+ "cloudTiering": "str",
+ "friendlyName": "str",
+ "id": "str",
+ "initialDownloadPolicy": "NamespaceThenModifiedFiles",
+ "initialUploadPolicy": "Merge",
+ "localCacheMode": "UpdateLocallyCachedFiles",
+ "name": "str",
+ "offlineDataTransfer": "str",
+ "offlineDataTransferShareName": "str",
+ "serverLocalPath": "str",
+ "serverResourceId": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tierFilesOlderThanDays": 0,
+ "type": "str",
+ "volumeFreeSpacePercent": 20,
+ },
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_server_endpoints_begin_update(self, resource_group):
+ response = self.client.server_endpoints.begin_update(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_server_endpoints_get(self, resource_group):
+ response = self.client.server_endpoints.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_server_endpoints_begin_delete(self, resource_group):
+ response = self.client.server_endpoints.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_server_endpoints_list_by_sync_group(self, resource_group):
+ response = self.client.server_endpoints.list_by_sync_group(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_server_endpoints_begin_recall_action(self, resource_group):
+ response = self.client.server_endpoints.begin_recall_action(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ parameters={"pattern": "str", "recallPath": "str"},
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_server_endpoints_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_server_endpoints_operations_async.py
new file mode 100644
index 000000000000..513e2a607c99
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_server_endpoints_operations_async.py
@@ -0,0 +1,136 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncServerEndpointsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_server_endpoints_begin_create(self, resource_group):
+ response = await (
+ await self.client.server_endpoints.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ parameters={
+ "cloudTiering": "str",
+ "friendlyName": "str",
+ "id": "str",
+ "initialDownloadPolicy": "NamespaceThenModifiedFiles",
+ "initialUploadPolicy": "Merge",
+ "localCacheMode": "UpdateLocallyCachedFiles",
+ "name": "str",
+ "offlineDataTransfer": "str",
+ "offlineDataTransferShareName": "str",
+ "serverLocalPath": "str",
+ "serverResourceId": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tierFilesOlderThanDays": 0,
+ "type": "str",
+ "volumeFreeSpacePercent": 20,
+ },
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_server_endpoints_begin_update(self, resource_group):
+ response = await (
+ await self.client.server_endpoints.begin_update(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_server_endpoints_get(self, resource_group):
+ response = await self.client.server_endpoints.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_server_endpoints_begin_delete(self, resource_group):
+ response = await (
+ await self.client.server_endpoints.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_server_endpoints_list_by_sync_group(self, resource_group):
+ response = self.client.server_endpoints.list_by_sync_group(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_server_endpoints_begin_recall_action(self, resource_group):
+ response = await (
+ await self.client.server_endpoints.begin_recall_action(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ server_endpoint_name="str",
+ parameters={"pattern": "str", "recallPath": "str"},
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_storage_sync_services_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_storage_sync_services_operations.py
new file mode 100644
index 000000000000..8bdea8b1b97a
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_storage_sync_services_operations.py
@@ -0,0 +1,123 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncStorageSyncServicesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_sync_services_check_name_availability(self, resource_group):
+ response = self.client.storage_sync_services.check_name_availability(
+ location_name="str",
+ parameters={"name": "str", "type": "Microsoft.StorageSync/storageSyncServices"},
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_sync_services_begin_create(self, resource_group):
+ response = self.client.storage_sync_services.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ parameters={
+ "location": "str",
+ "id": "str",
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "incomingTrafficPolicy": "str",
+ "name": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "useIdentity": bool,
+ },
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_sync_services_get(self, resource_group):
+ response = self.client.storage_sync_services.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_sync_services_begin_update(self, resource_group):
+ response = self.client.storage_sync_services.begin_update(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_sync_services_begin_delete(self, resource_group):
+ response = self.client.storage_sync_services.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_sync_services_list_by_resource_group(self, resource_group):
+ response = self.client.storage_sync_services.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_sync_services_list_by_subscription(self, resource_group):
+ response = self.client.storage_sync_services.list_by_subscription(
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_storage_sync_services_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_storage_sync_services_operations_async.py
new file mode 100644
index 000000000000..d1ce40ec0b83
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_storage_sync_services_operations_async.py
@@ -0,0 +1,130 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncStorageSyncServicesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_sync_services_check_name_availability(self, resource_group):
+ response = await self.client.storage_sync_services.check_name_availability(
+ location_name="str",
+ parameters={"name": "str", "type": "Microsoft.StorageSync/storageSyncServices"},
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_sync_services_begin_create(self, resource_group):
+ response = await (
+ await self.client.storage_sync_services.begin_create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ parameters={
+ "location": "str",
+ "id": "str",
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "incomingTrafficPolicy": "str",
+ "name": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "useIdentity": bool,
+ },
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_sync_services_get(self, resource_group):
+ response = await self.client.storage_sync_services.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_sync_services_begin_update(self, resource_group):
+ response = await (
+ await self.client.storage_sync_services.begin_update(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_sync_services_begin_delete(self, resource_group):
+ response = await (
+ await self.client.storage_sync_services.begin_delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ ).result() # call '.result()' to poll until service return final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_sync_services_list_by_resource_group(self, resource_group):
+ response = self.client.storage_sync_services.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_sync_services_list_by_subscription(self, resource_group):
+ response = self.client.storage_sync_services.list_by_subscription(
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_sync_groups_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_sync_groups_operations.py
new file mode 100644
index 000000000000..1e7eda1b15aa
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_sync_groups_operations.py
@@ -0,0 +1,84 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncSyncGroupsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sync_groups_list_by_storage_sync_service(self, resource_group):
+ response = self.client.sync_groups.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sync_groups_create(self, resource_group):
+ response = self.client.sync_groups.create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ parameters={
+ "id": "str",
+ "name": "str",
+ "properties": {},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sync_groups_get(self, resource_group):
+ response = self.client.sync_groups.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_sync_groups_delete(self, resource_group):
+ response = self.client.sync_groups.delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_sync_groups_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_sync_groups_operations_async.py
new file mode 100644
index 000000000000..2d563094fda0
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_sync_groups_operations_async.py
@@ -0,0 +1,85 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncSyncGroupsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sync_groups_list_by_storage_sync_service(self, resource_group):
+ response = self.client.sync_groups.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sync_groups_create(self, resource_group):
+ response = await self.client.sync_groups.create(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ parameters={
+ "id": "str",
+ "name": "str",
+ "properties": {},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "type": "str",
+ },
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sync_groups_get(self, resource_group):
+ response = await self.client.sync_groups.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_sync_groups_delete(self, resource_group):
+ response = await self.client.sync_groups.delete(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ sync_group_name="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_workflows_operations.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_workflows_operations.py
new file mode 100644
index 000000000000..fb50174d4337
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_workflows_operations.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncWorkflowsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workflows_list_by_storage_sync_service(self, resource_group):
+ response = self.client.workflows.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workflows_get(self, resource_group):
+ response = self.client.workflows.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ workflow_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workflows_abort(self, resource_group):
+ response = self.client.workflows.abort(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ workflow_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_workflows_operations_async.py b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_workflows_operations_async.py
new file mode 100644
index 000000000000..4e87410f54e2
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagesync/generated_tests/test_microsoft_storage_sync_workflows_operations_async.py
@@ -0,0 +1,58 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagesync.aio import MicrosoftStorageSync
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestMicrosoftStorageSyncWorkflowsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(MicrosoftStorageSync, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workflows_list_by_storage_sync_service(self, resource_group):
+ response = self.client.workflows.list_by_storage_sync_service(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ api_version="2022-09-01",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workflows_get(self, resource_group):
+ response = await self.client.workflows.get(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ workflow_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workflows_abort(self, resource_group):
+ response = await self.client.workflows.abort(
+ resource_group_name=resource_group.name,
+ storage_sync_service_name="str",
+ workflow_id="str",
+ api_version="2022-09-01",
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagesync/setup.py b/sdk/storage/azure-mgmt-storagesync/setup.py
index 26c9fa642a5e..831b6d585992 100644
--- a/sdk/storage/azure-mgmt-storagesync/setup.py
+++ b/sdk/storage/azure-mgmt-storagesync/setup.py
@@ -53,11 +53,11 @@
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"License :: OSI Approved :: MIT License",
],
zip_safe=False,
@@ -74,10 +74,10 @@
"pytyped": ["py.typed"],
},
install_requires=[
- "isodate<1.0.0,>=0.6.1",
- "azure-common~=1.1",
- "azure-mgmt-core>=1.3.2,<2.0.0",
- "typing-extensions>=4.3.0; python_version<'3.8.0'",
+ "isodate>=0.6.1",
+ "typing-extensions>=4.6.0",
+ "azure-common>=1.1",
+ "azure-mgmt-core>=1.3.2",
],
- python_requires=">=3.7",
+ python_requires=">=3.8",
)