diff --git a/sdk/storage/azure-mgmt-storagecache/_meta.json b/sdk/storage/azure-mgmt-storagecache/_meta.json
index 0f48132ed4ec..1f242f8795ff 100644
--- a/sdk/storage/azure-mgmt-storagecache/_meta.json
+++ b/sdk/storage/azure-mgmt-storagecache/_meta.json
@@ -1,11 +1,11 @@
{
- "commit": "a013dabbe84aeb3f5d48b0e30d15fdfbb6a8d062",
+ "commit": "9ca8aaa881c30a59bb1bfe4081209485f91187b5",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
- "autorest": "3.9.7",
+ "autorest": "3.10.2",
"use": [
- "@autorest/python@6.13.7",
+ "@autorest/python@6.27.4",
"@autorest/modelerfour@4.27.0"
],
- "autorest_command": "autorest specification/storagecache/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.13.7 --use=@autorest/modelerfour@4.27.0 --version=3.9.7 --version-tolerant=False",
+ "autorest_command": "autorest specification/storagecache/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.27.4 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
"readme": "specification/storagecache/resource-manager/readme.md"
}
\ No newline at end of file
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/__init__.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/__init__.py
index 005e6bc6bdf6..ee2f189055f4 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/__init__.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._storage_cache_management_client import StorageCacheManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._storage_cache_management_client import StorageCacheManagementClient # type: ignore
from ._version import VERSION
__version__ = VERSION
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -21,6 +27,6 @@
__all__ = [
"StorageCacheManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_configuration.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_configuration.py
index 4f316586e2ae..02dfc1f9aa56 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_configuration.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_configuration.py
@@ -14,7 +14,6 @@
from ._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
@@ -28,13 +27,13 @@ class StorageCacheManagementClientConfiguration: # pylint: disable=too-many-ins
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The ID of the target subscription. Required.
:type subscription_id: str
- :keyword api_version: Api Version. Default value is "2024-03-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2024-07-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
- api_version: str = kwargs.pop("api_version", "2024-03-01")
+ api_version: str = kwargs.pop("api_version", "2024-07-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_patch.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_patch.py
index f99e77fef986..17dbc073e01b 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_patch.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_patch.py
@@ -25,6 +25,7 @@
#
# --------------------------------------------------------------------------
+
# This file is used for handwritten extensions to the generated code. Example:
# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
def patch_sdk():
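The blank line above is the only change to _patch.py itself, but this file is the extension hook the package __init__.py consumes: anything _patch.py exports is re-exported via the __all__.extend(...) shown earlier, and patch_sdk() runs last. A minimal sketch of a handwritten customization (CustomStorageCacheManagementClient is a hypothetical name):

from typing import List

from ._storage_cache_management_client import StorageCacheManagementClient


class CustomStorageCacheManagementClient(StorageCacheManagementClient):
    """Hypothetical handwritten extension of the generated client."""


# Names listed here are picked up by the package __init__ and appended to its __all__.
__all__: List[str] = ["CustomStorageCacheManagementClient"]


def patch_sdk():
    """Apply in-place monkeypatches here if needed; a no-op by default."""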
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_serialization.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_serialization.py
index 2f781d740827..b24ab2885450 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_serialization.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -144,6 +145,8 @@ def _json_attemp(data):
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
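The new elif branch above makes deserialize_from_text return the decoded string for text/* payloads instead of falling through to the "Cannot deserialize content-type" error. A rough sketch of the behavior, assuming the internal RawDeserializer class is imported directly:

from azure.mgmt.storagecache._serialization import RawDeserializer

RawDeserializer.deserialize_from_text('{"a": 1}', "application/json")  # -> {'a': 1}
RawDeserializer.deserialize_from_text("hello", "text/plain")           # -> 'hello' via the new branch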
@@ -153,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will be tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -182,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
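These docstrings document the small UTC tzinfo shim; its behavior is unchanged. A quick sketch of what the three methods return, with the UTC class above in scope:

import datetime

tz = UTC()
dt = datetime.datetime(2024, 7, 1, tzinfo=tz)
tz.utcoffset(dt)  # datetime.timedelta(0)
tz.tzname(dt)     # 'Z'
tz.dst(dt)        # datetime.timedelta(seconds=3600), exactly as coded above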
@@ -204,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -233,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -298,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -324,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -344,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -378,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -393,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -406,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -424,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -446,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -499,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer: # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -538,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -558,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -590,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -631,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -662,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -701,7 +767,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -710,9 +776,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -726,21 +794,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -757,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -778,21 +846,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -803,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -819,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -839,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -869,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -880,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -943,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -969,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -977,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1001,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1032,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1089,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1103,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1139,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1170,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1184,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1209,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1230,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1277,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1329,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1352,7 +1441,7 @@ def xml_key_extractor(attr, attr_desc, data):
return children[0]
-class Deserializer(object):
+class Deserializer:
"""Response object model deserializer.
:param dict classes: Class type dictionary for deserializing complex types.
@@ -1361,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1401,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1414,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1438,13 +1529,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1474,9 +1565,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1503,6 +1593,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1514,7 +1606,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1529,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1550,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1577,24 +1673,35 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1603,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1625,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1645,14 +1757,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1669,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1679,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1718,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1730,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1741,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1766,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1779,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1792,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1802,9 +1916,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1820,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1832,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1847,8 +1963,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1863,6 +1980,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1875,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1885,14 +2004,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1908,6 +2027,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1922,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1937,14 +2058,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1974,8 +2095,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1983,6 +2103,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1994,5 +2115,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
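Most of the _serialization.py churn above is docstring padding, pylint pragmas, and else-after-return cleanups; the deserializer entry points behave as before. A short sketch of the static date helpers whose docstrings were extended, assuming direct import of the internal module:

from azure.mgmt.storagecache._serialization import Deserializer

Deserializer.deserialize_iso("2024-07-01T12:30:00Z")  # timezone-aware datetime.datetime
Deserializer.deserialize_date("2024-07-01")           # datetime.date(2024, 7, 1)
Deserializer.deserialize_duration("PT1H30M")          # isodate duration of 1h30m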
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_storage_cache_management_client.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_storage_cache_management_client.py
index 7b8f69487296..9d97559d36ae 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_storage_cache_management_client.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_storage_cache_management_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any, TYPE_CHECKING
+from typing_extensions import Self
from azure.core.pipeline import policies
from azure.core.rest import HttpRequest, HttpResponse
@@ -21,6 +22,7 @@
AmlFilesystemsOperations,
AscOperationsOperations,
AscUsagesOperations,
+ AutoExportJobsOperations,
CachesOperations,
ImportJobsOperations,
Operations,
@@ -32,19 +34,20 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class StorageCacheManagementClient(
StorageCacheManagementClientOperationsMixin
-): # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
+): # pylint: disable=too-many-instance-attributes
"""Azure Managed Lustre provides a fully managed LustreĀ® file system, integrated with Blob
storage, for use on demand. These operations create and manage Azure Managed Lustre file
systems.
:ivar aml_filesystems: AmlFilesystemsOperations operations
:vartype aml_filesystems: azure.mgmt.storagecache.operations.AmlFilesystemsOperations
+ :ivar auto_export_jobs: AutoExportJobsOperations operations
+ :vartype auto_export_jobs: azure.mgmt.storagecache.operations.AutoExportJobsOperations
:ivar import_jobs: ImportJobsOperations operations
:vartype import_jobs: azure.mgmt.storagecache.operations.ImportJobsOperations
:ivar operations: Operations operations
@@ -69,7 +72,7 @@ class StorageCacheManagementClient(
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
- :keyword api_version: Api Version. Default value is "2024-03-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2024-07-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
@@ -111,6 +114,7 @@ def __init__(
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.aml_filesystems = AmlFilesystemsOperations(self._client, self._config, self._serialize, self._deserialize)
+ self.auto_export_jobs = AutoExportJobsOperations(self._client, self._config, self._serialize, self._deserialize)
self.import_jobs = ImportJobsOperations(self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.skus = SkusOperations(self._client, self._config, self._serialize, self._deserialize)
@@ -146,7 +150,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs:
def close(self) -> None:
self._client.close()
- def __enter__(self) -> "StorageCacheManagementClient":
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
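With __enter__ now returning Self and the auto_export_jobs group wired up, sync usage looks like the sketch below (placeholder subscription ID; the concrete methods on auto_export_jobs depend on the generated AutoExportJobsOperations class):

from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient

with StorageCacheManagementClient(
    DefaultAzureCredential(), "00000000-0000-0000-0000-000000000000"
) as client:  # __enter__ -> Self preserves the concrete type for checkers
    ops = client.auto_export_jobs  # operation group new in api-version 2024-07-01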
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_vendor.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_vendor.py
index 6586138f68c0..9d16641c2b85 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_vendor.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_vendor.py
@@ -8,25 +8,14 @@
from abc import ABC
from typing import TYPE_CHECKING
-from azure.core.pipeline.transport import HttpRequest
-
from ._configuration import StorageCacheManagementClientConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import PipelineClient
from ._serialization import Deserializer, Serializer
-def _convert_request(request, files=None):
- data = request.content if not files else None
- request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
- if files:
- request.set_formdata_body(files)
- return request
-
-
class StorageCacheManagementClientMixinABC(ABC):
"""DO NOT use this class. It is for internal typing use only."""
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_version.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_version.py
index 48944bf3938a..e5754a47ce68 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_version.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "2.0.0"
+VERSION = "1.0.0b1"
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/__init__.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/__init__.py
index dd905c0e043c..95e8f4b57cc8 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/__init__.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._storage_cache_management_client import StorageCacheManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._storage_cache_management_client import StorageCacheManagementClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"StorageCacheManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_configuration.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_configuration.py
index 193b1d9920c8..42f018b0a801 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_configuration.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_configuration.py
@@ -14,7 +14,6 @@
from .._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
@@ -28,13 +27,13 @@ class StorageCacheManagementClientConfiguration: # pylint: disable=too-many-ins
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription. Required.
:type subscription_id: str
- :keyword api_version: Api Version. Default value is "2024-03-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2024-07-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
- api_version: str = kwargs.pop("api_version", "2024-03-01")
+ api_version: str = kwargs.pop("api_version", "2024-07-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
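
The default API version moves from 2024-03-01 to 2024-07-01, and it can still be overridden per client via the `api_version` keyword shown above. A short sketch of pinning the previous service version (placeholder subscription ID; per the docstring, overriding the default may result in unsupported behavior):

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.storagecache.aio import StorageCacheManagementClient

    # Pinning the previous service version; overriding the "2024-07-01"
    # default may result in unsupported behavior, per the docstring above.
    client = StorageCacheManagementClient(
        DefaultAzureCredential(),
        "<subscription-id>",  # placeholder
        api_version="2024-03-01",
    )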
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_patch.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_patch.py
index f99e77fef986..17dbc073e01b 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_patch.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_patch.py
@@ -25,6 +25,7 @@
#
# --------------------------------------------------------------------------
+
# This file is used for handwritten extensions to the generated code. Example:
# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
def patch_sdk():
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_storage_cache_management_client.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_storage_cache_management_client.py
index 56fc5343cfbf..3be0d1a76791 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_storage_cache_management_client.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_storage_cache_management_client.py
@@ -8,6 +8,7 @@
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
+from typing_extensions import Self
from azure.core.pipeline import policies
from azure.core.rest import AsyncHttpResponse, HttpRequest
@@ -21,6 +22,7 @@
AmlFilesystemsOperations,
AscOperationsOperations,
AscUsagesOperations,
+ AutoExportJobsOperations,
CachesOperations,
ImportJobsOperations,
Operations,
@@ -32,19 +34,20 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class StorageCacheManagementClient(
StorageCacheManagementClientOperationsMixin
-): # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
+): # pylint: disable=too-many-instance-attributes
"""Azure Managed Lustre provides a fully managed LustreĀ® file system, integrated with Blob
storage, for use on demand. These operations create and manage Azure Managed Lustre file
systems.
:ivar aml_filesystems: AmlFilesystemsOperations operations
:vartype aml_filesystems: azure.mgmt.storagecache.aio.operations.AmlFilesystemsOperations
+ :ivar auto_export_jobs: AutoExportJobsOperations operations
+ :vartype auto_export_jobs: azure.mgmt.storagecache.aio.operations.AutoExportJobsOperations
:ivar import_jobs: ImportJobsOperations operations
:vartype import_jobs: azure.mgmt.storagecache.aio.operations.ImportJobsOperations
:ivar operations: Operations operations
@@ -69,7 +72,7 @@ class StorageCacheManagementClient(
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
- :keyword api_version: Api Version. Default value is "2024-03-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2024-07-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
@@ -111,6 +114,7 @@ def __init__(
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.aml_filesystems = AmlFilesystemsOperations(self._client, self._config, self._serialize, self._deserialize)
+ self.auto_export_jobs = AutoExportJobsOperations(self._client, self._config, self._serialize, self._deserialize)
self.import_jobs = ImportJobsOperations(self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.skus = SkusOperations(self._client, self._config, self._serialize, self._deserialize)
@@ -148,7 +152,7 @@ def _send_request(
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "StorageCacheManagementClient":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
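
Annotating `__aenter__` with `typing_extensions.Self` means type checkers keep the concrete type through `async with`, which matters for subclasses. A sketch under that assumption; `InstrumentedClient` and `demo` are hypothetical names used only to illustrate the annotation:

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.storagecache.aio import StorageCacheManagementClient

    class InstrumentedClient(StorageCacheManagementClient):
        """Hypothetical subclass, used only to illustrate the annotation."""

    async def demo() -> None:
        async with InstrumentedClient(DefaultAzureCredential(), "<subscription-id>") as client:
            # With "-> Self", type checkers infer InstrumentedClient for
            # `client` instead of widening to StorageCacheManagementClient.
            assert isinstance(client, InstrumentedClient)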
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_vendor.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_vendor.py
index ad73b146350a..512800f650c2 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_vendor.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/_vendor.py
@@ -8,12 +8,9 @@
from abc import ABC
from typing import TYPE_CHECKING
-from azure.core.pipeline.transport import HttpRequest
-
from ._configuration import StorageCacheManagementClientConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import AsyncPipelineClient
from .._serialization import Deserializer, Serializer
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/__init__.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/__init__.py
index 4ada12103750..321526fcb39e 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/__init__.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/__init__.py
@@ -5,25 +5,33 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._aml_filesystems_operations import AmlFilesystemsOperations
-from ._import_jobs_operations import ImportJobsOperations
-from ._storage_cache_management_client_operations import StorageCacheManagementClientOperationsMixin
-from ._operations import Operations
-from ._skus_operations import SkusOperations
-from ._usage_models_operations import UsageModelsOperations
-from ._asc_operations_operations import AscOperationsOperations
-from ._asc_usages_operations import AscUsagesOperations
-from ._caches_operations import CachesOperations
-from ._storage_targets_operations import StorageTargetsOperations
-from ._storage_target_operations import StorageTargetOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._aml_filesystems_operations import AmlFilesystemsOperations # type: ignore
+from ._auto_export_jobs_operations import AutoExportJobsOperations # type: ignore
+from ._import_jobs_operations import ImportJobsOperations # type: ignore
+from ._storage_cache_management_client_operations import StorageCacheManagementClientOperationsMixin # type: ignore
+from ._operations import Operations # type: ignore
+from ._skus_operations import SkusOperations # type: ignore
+from ._usage_models_operations import UsageModelsOperations # type: ignore
+from ._asc_operations_operations import AscOperationsOperations # type: ignore
+from ._asc_usages_operations import AscUsagesOperations # type: ignore
+from ._caches_operations import CachesOperations # type: ignore
+from ._storage_targets_operations import StorageTargetsOperations # type: ignore
+from ._storage_target_operations import StorageTargetOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"AmlFilesystemsOperations",
+ "AutoExportJobsOperations",
"ImportJobsOperations",
"StorageCacheManagementClientOperationsMixin",
"Operations",
@@ -35,5 +43,5 @@
"StorageTargetsOperations",
"StorageTargetOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
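
With `AutoExportJobsOperations` exported here and wired to the client's `auto_export_jobs` attribute (see the client hunk above), the new operation group is reachable directly. A minimal sketch, with placeholder resource names, that lists jobs via the `list_by_aml_filesystem` method defined in the new `_auto_export_jobs_operations.py` module added later in this diff:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.storagecache.aio import StorageCacheManagementClient

    async def list_jobs() -> None:
        async with DefaultAzureCredential() as cred:
            async with StorageCacheManagementClient(cred, "<subscription-id>") as client:
                # Resource names are placeholders.
                async for job in client.auto_export_jobs.list_by_aml_filesystem(
                    "<resource-group>", "<aml-filesystem-name>"
                ):
                    print(job.name)

    asyncio.run(list_jobs())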
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_aml_filesystems_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_aml_filesystems_operations.py
index 0bf6a8a06ebf..bdfafb8ec1e0 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_aml_filesystems_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_aml_filesystems_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._aml_filesystems_operations import (
build_archive_request,
build_cancel_archive_request,
@@ -41,8 +41,11 @@
build_list_request,
build_update_request,
)
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -80,7 +83,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.AmlFilesystem"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AmlFilesystemsListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -97,7 +100,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -113,7 +115,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -159,7 +160,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AmlFilesystemsListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -177,7 +178,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -193,7 +193,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -222,10 +221,10 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -237,7 +236,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_delete_request(
resource_group_name=resource_group_name,
@@ -247,10 +246,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -258,6 +257,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -268,8 +271,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -296,7 +303,7 @@ async def begin_delete(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
aml_filesystem_name=aml_filesystem_name,
api_version=api_version,
@@ -305,6 +312,7 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -342,7 +350,7 @@ async def get(self, resource_group_name: str, aml_filesystem_name: str, **kwargs
:rtype: ~azure.mgmt.storagecache.models.AmlFilesystem
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -364,7 +372,6 @@ async def get(self, resource_group_name: str, aml_filesystem_name: str, **kwargs
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -378,7 +385,7 @@ async def get(self, resource_group_name: str, aml_filesystem_name: str, **kwargs
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
+ deserialized = self._deserialize("AmlFilesystem", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -391,8 +398,8 @@ async def _create_or_update_initial(
aml_filesystem_name: str,
aml_filesystem: Union[_models.AmlFilesystem, IO[bytes]],
**kwargs: Any
- ) -> _models.AmlFilesystem:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -405,7 +412,7 @@ async def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.AmlFilesystem] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -426,10 +433,10 @@ async def _create_or_update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -437,19 +444,20 @@ async def _create_or_update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
-
if response.status_code == 201:
response_headers["azure-async-operation"] = self._deserialize(
"str", response.headers.get("azure-async-operation")
)
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -564,10 +572,11 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
+ deserialized = self._deserialize("AmlFilesystem", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -598,8 +607,8 @@ async def _update_initial(
aml_filesystem_name: str,
aml_filesystem: Union[_models.AmlFilesystemUpdate, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.AmlFilesystem]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -612,7 +621,7 @@ async def _update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.AmlFilesystem]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -633,10 +642,10 @@ async def _update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -644,20 +653,22 @@ async def _update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
-
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["azure-async-operation"] = self._deserialize(
"str", response.headers.get("azure-async-operation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -771,10 +782,11 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
+ deserialized = self._deserialize("AmlFilesystem", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -800,7 +812,7 @@ def get_long_running_output(pipeline_response):
)
@overload
- async def archive( # pylint: disable=inconsistent-return-statements
+ async def archive(
self,
resource_group_name: str,
aml_filesystem_name: str,
@@ -828,7 +840,7 @@ async def archive( # pylint: disable=inconsistent-return-statements
"""
@overload
- async def archive( # pylint: disable=inconsistent-return-statements
+ async def archive(
self,
resource_group_name: str,
aml_filesystem_name: str,
@@ -856,7 +868,7 @@ async def archive( # pylint: disable=inconsistent-return-statements
"""
@distributed_trace_async
- async def archive( # pylint: disable=inconsistent-return-statements
+ async def archive(
self,
resource_group_name: str,
aml_filesystem_name: str,
@@ -878,7 +890,7 @@ async def archive( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -915,7 +927,6 @@ async def archive( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -933,9 +944,7 @@ async def archive( # pylint: disable=inconsistent-return-statements
return cls(pipeline_response, None, {}) # type: ignore
@distributed_trace_async
- async def cancel_archive( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any
- ) -> None:
+ async def cancel_archive(self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any) -> None:
"""Cancel archiving data from the AML file system.
:param resource_group_name: The name of the resource group. The name is case insensitive.
@@ -948,7 +957,7 @@ async def cancel_archive( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -970,7 +979,6 @@ async def cancel_archive( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
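
Across this file, the `_*_initial` helpers now run the pipeline with `stream=True` and return `AsyncIterator[bytes]`, and each `begin_*` method reads the body (`await raw_result.http_response.read()`) before handing off to the poller. From a caller's perspective the long-running-operation surface is unchanged, as this sketch shows (placeholder names throughout; azure-identity assumed):

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.storagecache.aio import StorageCacheManagementClient

    async def delete_filesystem() -> None:
        async with DefaultAzureCredential() as cred:
            async with StorageCacheManagementClient(cred, "<subscription-id>") as client:
                # begin_delete still returns an AsyncLROPoller[None]; the
                # streaming changes above are internal to the generated code.
                poller = await client.aml_filesystems.begin_delete(
                    "<resource-group>", "<aml-filesystem-name>"
                )
                await poller.result()

    asyncio.run(delete_filesystem())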
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_asc_operations_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_asc_operations_operations.py
index 8d5609fe2003..06c1a71d1aa0 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_asc_operations_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_asc_operations_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
@@ -17,17 +17,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._asc_operations_operations import build_get_request
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -63,7 +64,7 @@ async def get(self, location: str, operation_id: str, **kwargs: Any) -> _models.
:rtype: ~azure.mgmt.storagecache.models.AscOperation
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -85,7 +86,6 @@ async def get(self, location: str, operation_id: str, **kwargs: Any) -> _models.
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -99,7 +99,7 @@ async def get(self, location: str, operation_id: str, **kwargs: Any) -> _models.
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("AscOperation", pipeline_response)
+ deserialized = self._deserialize("AscOperation", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
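
The changes here are mechanical (a `MutableMapping`-typed error map, deserializing from `pipeline_response.http_response`); the call shape of `asc_operations.get` is unchanged. A sketch with placeholder arguments; the `status` field on the returned `AscOperation` is an assumption about the model:

    from azure.mgmt.storagecache.aio import StorageCacheManagementClient

    async def check_operation(client: StorageCacheManagementClient) -> None:
        # get() returns an AscOperation model; the status field (assumed
        # here) reports the state of the asynchronous operation.
        operation = await client.asc_operations.get("<location>", "<operation-id>")
        print(operation.status)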
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_asc_usages_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_asc_usages_operations.py
index 7919e934c180..22ca1ae72512 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_asc_usages_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_asc_usages_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
@@ -19,17 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._asc_usages_operations import build_list_request
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -69,7 +70,7 @@ def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.ResourceU
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ResourceUsagesListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -87,7 +88,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -103,7 +103,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_auto_export_jobs_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_auto_export_jobs_operations.py
new file mode 100644
index 000000000000..308569a44e97
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_auto_export_jobs_operations.py
@@ -0,0 +1,786 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ...operations._auto_export_jobs_operations import (
+ build_create_or_update_request,
+ build_delete_request,
+ build_get_request,
+ build_list_by_aml_filesystem_request,
+ build_update_request,
+)
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class AutoExportJobsOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.mgmt.storagecache.aio.StorageCacheManagementClient`'s
+ :attr:`auto_export_jobs` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ async def _delete_initial(
+ self, resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["Azure-AsyncOperation"] = self._deserialize(
+ "str", response.headers.get("Azure-AsyncOperation")
+ )
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace_async
+ async def begin_delete(
+ self, resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, **kwargs: Any
+ ) -> AsyncLROPoller[None]:
+ """Schedules an auto export job for deletion.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ api_version=api_version,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(
+ AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
+ @distributed_trace_async
+ async def get(
+ self, resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, **kwargs: Any
+ ) -> _models.AutoExportJob:
+ """Returns an auto export job.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :return: AutoExportJob or the result of cls(response)
+ :rtype: ~azure.mgmt.storagecache.models.AutoExportJob
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.AutoExportJob] = kwargs.pop("cls", None)
+
+ _request = build_get_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize("AutoExportJob", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: Union[_models.AutoExportJob, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(auto_export_job, (IOBase, bytes)):
+ _content = auto_export_job
+ else:
+ _json = self._serialize.body(auto_export_job, "AutoExportJob")
+
+ _request = build_create_or_update_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 201:
+ response_headers["azure-async-operation"] = self._deserialize(
+ "str", response.headers.get("azure-async-operation")
+ )
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: _models.AutoExportJob,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.AutoExportJob]:
+ """Create or update an auto export job.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Required.
+ :type auto_export_job: ~azure.mgmt.storagecache.models.AutoExportJob
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.AutoExportJob]:
+ """Create or update an auto export job.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Required.
+ :type auto_export_job: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: Union[_models.AutoExportJob, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.AutoExportJob]:
+ """Create or update an auto export job.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Is either an AutoExportJob type or an IO[bytes] type. Required.
+ :type auto_export_job: ~azure.mgmt.storagecache.models.AutoExportJob or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.AutoExportJob] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ auto_export_job=auto_export_job,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("AutoExportJob", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(
+ AsyncPollingMethod,
+ AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs),
+ )
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.AutoExportJob].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.AutoExportJob](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ async def _update_initial(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: Union[_models.AutoExportJobUpdate, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(auto_export_job, (IOBase, bytes)):
+ _content = auto_export_job
+ else:
+ _json = self._serialize.body(auto_export_job, "AutoExportJobUpdate")
+
+ _request = build_update_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["azure-async-operation"] = self._deserialize(
+ "str", response.headers.get("azure-async-operation")
+ )
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: _models.AutoExportJobUpdate,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.AutoExportJob]:
+ """Update an auto export job instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Required.
+ :type auto_export_job: ~azure.mgmt.storagecache.models.AutoExportJobUpdate
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.AutoExportJob]:
+ """Update an auto export job instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Required.
+ :type auto_export_job: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of AsyncLROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: Union[_models.AutoExportJobUpdate, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncLROPoller[_models.AutoExportJob]:
+ """Update an auto export job instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Is either an AutoExportJobUpdate type or an IO[bytes] type. Required.
+ :type auto_export_job: ~azure.mgmt.storagecache.models.AutoExportJobUpdate or IO[bytes]
+ :return: An instance of AsyncLROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.AutoExportJob] = kwargs.pop("cls", None)
+ polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = await self._update_initial(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ auto_export_job=auto_export_job,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ await raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("AutoExportJob", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: AsyncPollingMethod = cast(
+ AsyncPollingMethod,
+ AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs),
+ )
+ elif polling is False:
+ polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return AsyncLROPoller[_models.AutoExportJob].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return AsyncLROPoller[_models.AutoExportJob](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ @distributed_trace
+ def list_by_aml_filesystem(
+ self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any
+ ) -> AsyncIterable["_models.AutoExportJob"]:
+ """Returns all the auto export jobs the user has access to under an AML File System.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :return: An iterator-like instance of either AutoExportJob or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.AutoExportJobsListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_list_by_aml_filesystem_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize("AutoExportJobsListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(get_next, extract_data)
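The new async AutoExportJobs operations above pair a long-running update (begin_update) with a pageable list (list_by_aml_filesystem). A minimal caller-side sketch, assuming the operations group is exposed on the client as auto_export_jobs and using placeholder resource names and a placeholder update payload:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.storagecache.aio import StorageCacheManagementClient

    async def main() -> None:
        async with DefaultAzureCredential() as credential:
            async with StorageCacheManagementClient(credential, "<subscription-id>") as client:
                # begin_update returns an AsyncLROPoller; awaiting result() polls the
                # operation (final-state-via azure-async-operation, per the poller
                # configuration above) until it reaches a terminal state.
                poller = await client.auto_export_jobs.begin_update(
                    resource_group_name="my-rg",
                    aml_filesystem_name="my-amlfs",
                    auto_export_job_name="my-export-job",
                    auto_export_job={"tags": {"env": "test"}},  # hypothetical body fields
                )
                job = await poller.result()
                print(job.name)
                # list_by_aml_filesystem is not awaited: it returns an AsyncItemPaged
                # that follows nextLink transparently (see prepare_request/get_next).
                async for item in client.auto_export_jobs.list_by_aml_filesystem(
                    "my-rg", "my-amlfs"
                ):
                    print(item.name)

    asyncio.run(main())

Passing a plain dict for auto_export_job relies on msrest body serialization accepting mappings; constructing models.AutoExportJobUpdate explicitly is equivalent.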
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_caches_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_caches_operations.py
index c140b717105a..279e3648b616 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_caches_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_caches_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +7,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +18,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +32,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._caches_operations import (
build_create_or_update_request,
build_debug_info_request,
@@ -49,8 +50,11 @@
build_update_request,
build_upgrade_firmware_request,
)
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -88,7 +92,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Cache"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.CachesListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -105,7 +109,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -121,7 +124,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -167,7 +169,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.CachesListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -185,7 +187,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -201,7 +202,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -230,10 +230,8 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ async def _delete_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -245,7 +243,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_delete_request(
resource_group_name=resource_group_name,
@@ -255,10 +253,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -266,6 +264,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -276,8 +278,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
@@ -302,7 +308,7 @@ async def begin_delete(self, resource_group_name: str, cache_name: str, **kwargs
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -311,6 +317,7 @@ async def begin_delete(self, resource_group_name: str, cache_name: str, **kwargs
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -346,7 +353,7 @@ async def get(self, resource_group_name: str, cache_name: str, **kwargs: Any) ->
:rtype: ~azure.mgmt.storagecache.models.Cache
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -368,7 +375,6 @@ async def get(self, resource_group_name: str, cache_name: str, **kwargs: Any) ->
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -382,7 +388,7 @@ async def get(self, resource_group_name: str, cache_name: str, **kwargs: Any) ->
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Cache", pipeline_response)
+ deserialized = self._deserialize("Cache", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -391,8 +397,8 @@ async def get(self, resource_group_name: str, cache_name: str, **kwargs: Any) ->
async def _create_or_update_initial(
self, resource_group_name: str, cache_name: str, cache: Union[_models.Cache, IO[bytes]], **kwargs: Any
- ) -> Optional[_models.Cache]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -405,7 +411,7 @@ async def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.Cache]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -426,10 +432,10 @@ async def _create_or_update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -437,15 +443,14 @@ async def _create_or_update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("Cache", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("Cache", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -551,10 +556,11 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Cache", pipeline_response)
+ deserialized = self._deserialize("Cache", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -582,8 +588,8 @@ async def _update_initial(
cache_name: str,
cache: Optional[Union[_models.Cache, IO[bytes]]] = None,
**kwargs: Any
- ) -> Optional[_models.Cache]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -596,7 +602,7 @@ async def _update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.Cache]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -620,10 +626,10 @@ async def _update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -631,20 +637,22 @@ async def _update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("Cache", pipeline_response)
-
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -755,10 +763,11 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Cache", pipeline_response)
+ deserialized = self._deserialize("Cache", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -783,10 +792,10 @@ def get_long_running_output(pipeline_response):
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
- async def _debug_info_initial( # pylint: disable=inconsistent-return-statements
+ async def _debug_info_initial(
self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -798,7 +807,7 @@ async def _debug_info_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_debug_info_request(
resource_group_name=resource_group_name,
@@ -808,10 +817,10 @@ async def _debug_info_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -819,6 +828,10 @@ async def _debug_info_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -829,8 +842,12 @@ async def _debug_info_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_debug_info(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
@@ -855,7 +872,7 @@ async def begin_debug_info(self, resource_group_name: str, cache_name: str, **kw
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._debug_info_initial( # type: ignore
+ raw_result = await self._debug_info_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -864,6 +881,7 @@ async def begin_debug_info(self, resource_group_name: str, cache_name: str, **kw
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -888,10 +906,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _flush_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ async def _flush_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -903,7 +919,7 @@ async def _flush_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_flush_request(
resource_group_name=resource_group_name,
@@ -913,10 +929,10 @@ async def _flush_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -924,6 +940,10 @@ async def _flush_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -934,8 +954,12 @@ async def _flush_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_flush(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
@@ -961,7 +985,7 @@ async def begin_flush(self, resource_group_name: str, cache_name: str, **kwargs:
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._flush_initial( # type: ignore
+ raw_result = await self._flush_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -970,6 +994,7 @@ async def begin_flush(self, resource_group_name: str, cache_name: str, **kwargs:
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -994,10 +1019,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _start_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ async def _start_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1009,7 +1032,7 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_start_request(
resource_group_name=resource_group_name,
@@ -1019,10 +1042,10 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1030,6 +1053,10 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1040,8 +1067,12 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_start(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
@@ -1066,7 +1097,7 @@ async def begin_start(self, resource_group_name: str, cache_name: str, **kwargs:
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._start_initial( # type: ignore
+ raw_result = await self._start_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -1075,6 +1106,7 @@ async def begin_start(self, resource_group_name: str, cache_name: str, **kwargs:
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1099,10 +1131,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _stop_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ async def _stop_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1114,7 +1144,7 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_stop_request(
resource_group_name=resource_group_name,
@@ -1124,10 +1154,10 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1135,6 +1165,10 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1145,8 +1179,12 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_stop(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
@@ -1171,7 +1209,7 @@ async def begin_stop(self, resource_group_name: str, cache_name: str, **kwargs:
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._stop_initial( # type: ignore
+ raw_result = await self._stop_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -1180,6 +1218,7 @@ async def begin_stop(self, resource_group_name: str, cache_name: str, **kwargs:
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1204,14 +1243,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _start_priming_job_initial( # pylint: disable=inconsistent-return-statements
+ async def _start_priming_job_initial(
self,
resource_group_name: str,
cache_name: str,
primingjob: Optional[Union[_models.PrimingJob, IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1224,7 +1263,7 @@ async def _start_priming_job_initial( # pylint: disable=inconsistent-return-sta
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -1248,10 +1287,10 @@ async def _start_priming_job_initial( # pylint: disable=inconsistent-return-sta
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1259,6 +1298,10 @@ async def _start_priming_job_initial( # pylint: disable=inconsistent-return-sta
response = pipeline_response.http_response
if response.status_code not in [202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1268,8 +1311,12 @@ async def _start_priming_job_initial( # pylint: disable=inconsistent-return-sta
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
async def begin_start_priming_job(
@@ -1360,7 +1407,7 @@ async def begin_start_priming_job(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._start_priming_job_initial( # type: ignore
+ raw_result = await self._start_priming_job_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
primingjob=primingjob,
@@ -1371,6 +1418,7 @@ async def begin_start_priming_job(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1395,14 +1443,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _stop_priming_job_initial( # pylint: disable=inconsistent-return-statements
+ async def _stop_priming_job_initial(
self,
resource_group_name: str,
cache_name: str,
priming_job_id: Optional[Union[_models.PrimingJobIdParameter, IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1415,7 +1463,7 @@ async def _stop_priming_job_initial( # pylint: disable=inconsistent-return-stat
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -1439,10 +1487,10 @@ async def _stop_priming_job_initial( # pylint: disable=inconsistent-return-stat
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1450,6 +1498,10 @@ async def _stop_priming_job_initial( # pylint: disable=inconsistent-return-stat
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1460,8 +1512,12 @@ async def _stop_priming_job_initial( # pylint: disable=inconsistent-return-stat
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
async def begin_stop_priming_job(
@@ -1552,7 +1608,7 @@ async def begin_stop_priming_job(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._stop_priming_job_initial( # type: ignore
+ raw_result = await self._stop_priming_job_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
priming_job_id=priming_job_id,
@@ -1563,6 +1619,7 @@ async def begin_stop_priming_job(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1587,14 +1644,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _pause_priming_job_initial( # pylint: disable=inconsistent-return-statements
+ async def _pause_priming_job_initial(
self,
resource_group_name: str,
cache_name: str,
priming_job_id: Optional[Union[_models.PrimingJobIdParameter, IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1607,7 +1664,7 @@ async def _pause_priming_job_initial( # pylint: disable=inconsistent-return-sta
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -1631,10 +1688,10 @@ async def _pause_priming_job_initial( # pylint: disable=inconsistent-return-sta
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1642,6 +1699,10 @@ async def _pause_priming_job_initial( # pylint: disable=inconsistent-return-sta
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1652,8 +1713,12 @@ async def _pause_priming_job_initial( # pylint: disable=inconsistent-return-sta
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
async def begin_pause_priming_job(
@@ -1744,7 +1809,7 @@ async def begin_pause_priming_job(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._pause_priming_job_initial( # type: ignore
+ raw_result = await self._pause_priming_job_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
priming_job_id=priming_job_id,
@@ -1755,6 +1820,7 @@ async def begin_pause_priming_job(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1779,14 +1845,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _resume_priming_job_initial( # pylint: disable=inconsistent-return-statements
+ async def _resume_priming_job_initial(
self,
resource_group_name: str,
cache_name: str,
priming_job_id: Optional[Union[_models.PrimingJobIdParameter, IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1799,7 +1865,7 @@ async def _resume_priming_job_initial( # pylint: disable=inconsistent-return-st
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -1823,10 +1889,10 @@ async def _resume_priming_job_initial( # pylint: disable=inconsistent-return-st
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1834,6 +1900,10 @@ async def _resume_priming_job_initial( # pylint: disable=inconsistent-return-st
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1844,8 +1914,12 @@ async def _resume_priming_job_initial( # pylint: disable=inconsistent-return-st
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
async def begin_resume_priming_job(
@@ -1936,7 +2010,7 @@ async def begin_resume_priming_job(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._resume_priming_job_initial( # type: ignore
+ raw_result = await self._resume_priming_job_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
priming_job_id=priming_job_id,
@@ -1947,6 +2021,7 @@ async def begin_resume_priming_job(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1971,10 +2046,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-statements
+ async def _upgrade_firmware_initial(
self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1986,7 +2061,7 @@ async def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-stat
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_upgrade_firmware_request(
resource_group_name=resource_group_name,
@@ -1996,10 +2071,10 @@ async def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-stat
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -2007,6 +2082,10 @@ async def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-stat
response = pipeline_response.http_response
if response.status_code not in [201, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -2017,8 +2096,12 @@ async def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-stat
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_upgrade_firmware(
@@ -2046,7 +2129,7 @@ async def begin_upgrade_firmware(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._upgrade_firmware_initial( # type: ignore
+ raw_result = await self._upgrade_firmware_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -2055,6 +2138,7 @@ async def begin_upgrade_firmware(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -2079,14 +2163,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _space_allocation_initial( # pylint: disable=inconsistent-return-statements
+ async def _space_allocation_initial(
self,
resource_group_name: str,
cache_name: str,
space_allocation: Optional[Union[List[_models.StorageTargetSpaceAllocation], IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2099,7 +2183,7 @@ async def _space_allocation_initial( # pylint: disable=inconsistent-return-stat
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -2123,10 +2207,10 @@ async def _space_allocation_initial( # pylint: disable=inconsistent-return-stat
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -2134,6 +2218,10 @@ async def _space_allocation_initial( # pylint: disable=inconsistent-return-stat
response = pipeline_response.http_response
if response.status_code not in [202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -2143,8 +2231,12 @@ async def _space_allocation_initial( # pylint: disable=inconsistent-return-stat
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
async def begin_space_allocation(
@@ -2238,7 +2330,7 @@ async def begin_space_allocation(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._space_allocation_initial( # type: ignore
+ raw_result = await self._space_allocation_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
space_allocation=space_allocation,
@@ -2249,6 +2341,7 @@ async def begin_space_allocation(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
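A pattern repeated throughout this regeneration of _caches_operations.py: each _*_initial helper now runs the pipeline with stream=True and returns AsyncIterator[bytes] via response.stream_download(...), while the corresponding begin_* method drains the body (await raw_result.http_response.read()) before constructing the poller; on non-success status codes the body is read first and StreamConsumedError/StreamClosedError are swallowed so the connection is released before raising. Caller-visible behavior is unchanged, including continuation-token resume, which the cont_token branches above still support. A hedged sketch of detaching and resuming one of these LROs (resource names are placeholders):

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.storagecache.aio import StorageCacheManagementClient

    async def main() -> None:
        async with DefaultAzureCredential() as credential:
            async with StorageCacheManagementClient(credential, "<subscription-id>") as client:
                poller = await client.caches.begin_flush("my-rg", "my-cache")
                # continuation_token() yields a serializable checkpoint of the LRO.
                token = poller.continuation_token()
                # Later, possibly in another process: rebuilding the poller from the
                # token takes the cont_token branch in begin_flush and skips the
                # initial request entirely.
                resumed = await client.caches.begin_flush(
                    "my-rg", "my-cache", continuation_token=token
                )
                await resumed.result()

    asyncio.run(main())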
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_import_jobs_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_import_jobs_operations.py
index 0cac41cd72cf..cadbb2521cdb 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_import_jobs_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_import_jobs_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._import_jobs_operations import (
build_create_or_update_request,
build_delete_request,
@@ -38,8 +38,11 @@
build_list_by_aml_filesystem_request,
build_update_request,
)
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -63,10 +66,10 @@ def __init__(self, *args, **kwargs) -> None:
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, aml_filesystem_name: str, import_job_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -78,7 +81,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_delete_request(
resource_group_name=resource_group_name,
@@ -89,10 +92,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -100,6 +103,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -111,8 +118,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -142,7 +153,7 @@ async def begin_delete(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
aml_filesystem_name=aml_filesystem_name,
import_job_name=import_job_name,
@@ -152,6 +163,7 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -194,7 +206,7 @@ async def get(
:rtype: ~azure.mgmt.storagecache.models.ImportJob
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -217,7 +229,6 @@ async def get(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -232,7 +243,7 @@ async def get(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ImportJob", pipeline_response)
+ deserialized = self._deserialize("ImportJob", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -246,8 +257,8 @@ async def _create_or_update_initial(
import_job_name: str,
import_job: Union[_models.ImportJob, IO[bytes]],
**kwargs: Any
- ) -> _models.ImportJob:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -260,7 +271,7 @@ async def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.ImportJob] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -282,10 +293,10 @@ async def _create_or_update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -293,20 +304,21 @@ async def _create_or_update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("ImportJob", pipeline_response)
-
if response.status_code == 201:
response_headers["azure-async-operation"] = self._deserialize(
"str", response.headers.get("azure-async-operation")
)
- deserialized = self._deserialize("ImportJob", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -324,8 +336,7 @@ async def begin_create_or_update(
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.ImportJob]:
- """Create or update an import job. Import jobs are automatically deleted 72 hours after
- completion.
+ """Create or update an import job.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
@@ -360,8 +371,7 @@ async def begin_create_or_update(
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.ImportJob]:
- """Create or update an import job. Import jobs are automatically deleted 72 hours after
- completion.
+ """Create or update an import job.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
@@ -394,8 +404,7 @@ async def begin_create_or_update(
import_job: Union[_models.ImportJob, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.ImportJob]:
- """Create or update an import job. Import jobs are automatically deleted 72 hours after
- completion.
+ """Create or update an import job.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
@@ -437,10 +446,11 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("ImportJob", pipeline_response)
+ deserialized = self._deserialize("ImportJob", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -472,8 +482,8 @@ async def _update_initial(
import_job_name: str,
import_job: Union[_models.ImportJobUpdate, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.ImportJob]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -486,7 +496,7 @@ async def _update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.ImportJob]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -508,10 +518,10 @@ async def _update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -519,21 +529,23 @@ async def _update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("ImportJob", pipeline_response)
-
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["azure-async-operation"] = self._deserialize(
"str", response.headers.get("azure-async-operation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -660,10 +672,11 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("ImportJob", pipeline_response)
+ deserialized = self._deserialize("ImportJob", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -710,7 +723,7 @@ def list_by_aml_filesystem(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ImportJobsListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -729,7 +742,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -745,7 +757,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
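
Taken together, the hunks above rewrite every `_*_initial` helper around the same streaming contract: the request is sent with `stream=True`, error responses are drained before `map_error` runs, and the success path returns the undeserialized byte stream for the `begin_*` wrapper to read. A condensed, self-contained sketch of that control flow (the `initial_call` name, the free-function shape, and the status list are simplifications of the generated methods):

```python
from typing import Any, AsyncIterator

from azure.core.exceptions import (
    HttpResponseError,
    StreamClosedError,
    StreamConsumedError,
)
from azure.core.pipeline import AsyncPipeline
from azure.core.rest import HttpRequest


async def initial_call(pipeline: AsyncPipeline, request: HttpRequest, **kwargs: Any) -> AsyncIterator[bytes]:
    decompress = kwargs.pop("decompress", True)
    # Streaming mode: the body is not buffered during the pipeline run.
    pipeline_response = await pipeline.run(request, stream=True, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [202, 204]:
        try:
            # Drain the body so the connection can be closed cleanly even
            # though nothing has been deserialized yet.
            await response.read()
        except (StreamConsumedError, StreamClosedError):
            pass
        raise HttpResponseError(response=response)
    # Hand back the raw stream; the begin_* caller awaits
    # raw_result.http_response.read() before constructing the poller.
    return response.stream_download(pipeline, decompress=decompress)
```

Deferring deserialization this way leaves it to the poller to decide whether the initial body is needed at all, instead of paying for model deserialization on the 202 path.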
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_operations.py
index ec2c150caa7a..34d4b9a84e92 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
@@ -19,17 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._operations import build_list_request
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -67,7 +68,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.ApiOperation"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApiOperationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -83,7 +84,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -99,7 +99,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
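
The pager plumbing above (`prepare_request`, `extract_data`, `get_next`) feeds `AsyncItemPaged`, which only needs two callables: one that fetches a page for a continuation token and one that splits a page into (next token, items). A toy sketch with two fake in-memory pages, assuming the service's `value`/`nextLink` page shape:

```python
import asyncio

from azure.core.async_paging import AsyncItemPaged, AsyncList


async def main():
    pages = {
        None: {"value": ["op-a", "op-b"], "nextLink": "page-2"},
        "page-2": {"value": ["op-c"], "nextLink": None},
    }

    async def get_next(continuation_token=None):
        # Stands in for prepare_request() plus the pipeline run.
        return pages[continuation_token]

    async def extract_data(page):
        # Return (continuation_token, async-iterable of items); a None
        # token after the first call ends the iteration.
        return page["nextLink"], AsyncList(page["value"])

    async for op in AsyncItemPaged(get_next, extract_data):
        print(op)


asyncio.run(main())
```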
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_skus_operations.py
index 6bed3c35a7e0..a31ec287a7dd 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_skus_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_skus_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
@@ -19,17 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._skus_operations import build_list_request
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -67,7 +68,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.ResourceSku"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ResourceSkusResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -84,7 +85,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -100,7 +100,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
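
Every regenerated module gates the `MutableMapping` import on the interpreter version because `typing.MutableMapping` is a deprecated alias from Python 3.9 onward, while the `collections.abc` ABC only became subscriptable in 3.9. A minimal sketch of the gate and the bare annotation it enables:

```python
import sys

if sys.version_info >= (3, 9):
    from collections.abc import MutableMapping
else:
    from typing import MutableMapping  # type: ignore

from azure.core.exceptions import (
    ClientAuthenticationError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
)

# Annotating with the ABC rather than a concrete Dict keeps the map open
# to caller-supplied handlers merged in via kwargs.pop("error_map", {}).
error_map: MutableMapping = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
}
error_map.update({304: ResourceNotModifiedError})
```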
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_cache_management_client_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_cache_management_client_operations.py
index a6904367825f..a558b34b3611 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_cache_management_client_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_cache_management_client_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,6 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
+import sys
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
@@ -18,20 +18,22 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._storage_cache_management_client_operations import (
build_check_aml_fs_subnets_request,
build_get_required_aml_fs_subnets_size_request,
)
from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -39,8 +41,9 @@
class StorageCacheManagementClientOperationsMixin( # pylint: disable=name-too-long
StorageCacheManagementClientMixinABC
):
+
@overload
- async def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
+ async def check_aml_fs_subnets(
self,
aml_filesystem_subnet_info: Optional[_models.AmlFilesystemSubnetInfo] = None,
*,
@@ -61,7 +64,7 @@ async def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statement
"""
@overload
- async def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
+ async def check_aml_fs_subnets(
self,
aml_filesystem_subnet_info: Optional[IO[bytes]] = None,
*,
@@ -82,7 +85,7 @@ async def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statement
"""
@distributed_trace_async
- async def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
+ async def check_aml_fs_subnets(
self,
aml_filesystem_subnet_info: Optional[Union[_models.AmlFilesystemSubnetInfo, IO[bytes]]] = None,
**kwargs: Any
@@ -97,16 +100,11 @@ async def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statement
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
- 400: lambda response: HttpResponseError(
- response=response,
- model=self._deserialize(_models.AmlFilesystemCheckSubnetError, response),
- error_format=ARMErrorFormat,
- ),
}
error_map.update(kwargs.pop("error_map", {}) or {})
@@ -137,7 +135,6 @@ async def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statement
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -149,7 +146,10 @@ async def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statement
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ error = None
+ if response.status_code == 400:
+ error = self._deserialize.failsafe_deserialize(_models.AmlFilesystemCheckSubnetError, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {}) # type: ignore
@@ -216,7 +216,7 @@ async def get_required_aml_fs_subnets_size(
:rtype: ~azure.mgmt.storagecache.models.RequiredAmlFilesystemSubnetsSize
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -253,7 +253,6 @@ async def get_required_aml_fs_subnets_size(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -267,7 +266,7 @@ async def get_required_aml_fs_subnets_size(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("RequiredAmlFilesystemSubnetsSize", pipeline_response)
+ deserialized = self._deserialize("RequiredAmlFilesystemSubnetsSize", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
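
The 400 handling in `check_aml_fs_subnets` moves out of `error_map`: rather than a lambda that builds the typed error eagerly during `map_error`, the raise site now deserializes the `AmlFilesystemCheckSubnetError` body defensively after the known-status mapping. A condensed sketch of that shape (the function name and parameters are illustrative, not the generated signature):

```python
from azure.core.exceptions import HttpResponseError, map_error
from azure.mgmt.core.exceptions import ARMErrorFormat


def raise_for_subnet_check(deserializer, pipeline_response, error_map, error_model):
    response = pipeline_response.http_response
    # Well-known statuses (401/404/409/304) raise their dedicated types here.
    map_error(status_code=response.status_code, response=response, error_map=error_map)
    error = None
    if response.status_code == 400:
        # failsafe_deserialize returns None instead of raising on a
        # malformed body, so a bad payload cannot mask the HTTP failure.
        error = deserializer.failsafe_deserialize(error_model, pipeline_response)
    raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
```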
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_target_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_target_operations.py
index 0effdef4083d..3d91d77ce1c4 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_target_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_target_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,7 +5,8 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast
+import sys
+from typing import Any, AsyncIterator, Callable, Dict, Optional, TypeVar, Union, cast
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -14,27 +14,30 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._storage_target_operations import (
build_flush_request,
build_invalidate_request,
build_resume_request,
build_suspend_request,
)
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -58,10 +61,10 @@ def __init__(self, *args, **kwargs) -> None:
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
- async def _flush_initial( # pylint: disable=inconsistent-return-statements
+ async def _flush_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -73,7 +76,7 @@ async def _flush_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_flush_request(
resource_group_name=resource_group_name,
@@ -84,10 +87,10 @@ async def _flush_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -95,6 +98,10 @@ async def _flush_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -105,8 +112,12 @@ async def _flush_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_flush(
@@ -137,7 +148,7 @@ async def begin_flush(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._flush_initial( # type: ignore
+ raw_result = await self._flush_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -147,6 +158,7 @@ async def begin_flush(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -171,10 +183,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _suspend_initial( # pylint: disable=inconsistent-return-statements
+ async def _suspend_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -186,7 +198,7 @@ async def _suspend_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_suspend_request(
resource_group_name=resource_group_name,
@@ -197,10 +209,10 @@ async def _suspend_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -208,6 +220,10 @@ async def _suspend_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -218,8 +234,12 @@ async def _suspend_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_suspend(
@@ -248,7 +268,7 @@ async def begin_suspend(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._suspend_initial( # type: ignore
+ raw_result = await self._suspend_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -258,6 +278,7 @@ async def begin_suspend(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -282,10 +303,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _resume_initial( # pylint: disable=inconsistent-return-statements
+ async def _resume_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -297,7 +318,7 @@ async def _resume_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_resume_request(
resource_group_name=resource_group_name,
@@ -308,10 +329,10 @@ async def _resume_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -319,6 +340,10 @@ async def _resume_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -329,8 +354,12 @@ async def _resume_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_resume(
@@ -359,7 +388,7 @@ async def begin_resume(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._resume_initial( # type: ignore
+ raw_result = await self._resume_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -369,6 +398,7 @@ async def begin_resume(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -393,10 +423,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- async def _invalidate_initial( # pylint: disable=inconsistent-return-statements
+ async def _invalidate_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -408,7 +438,7 @@ async def _invalidate_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_invalidate_request(
resource_group_name=resource_group_name,
@@ -419,10 +449,10 @@ async def _invalidate_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -430,6 +460,10 @@ async def _invalidate_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -440,8 +474,12 @@ async def _invalidate_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_invalidate(
@@ -471,7 +509,7 @@ async def begin_invalidate(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._invalidate_initial( # type: ignore
+ raw_result = await self._invalidate_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -481,6 +519,7 @@ async def begin_invalidate(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
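
On the `begin_*` side the pattern is symmetric: the wrapper awaits the streaming initial call, fully reads the body so the poller starts from a settled response, and only then builds the `AsyncLROPoller`. A simplified sketch of one wrapper, written as it would sit on the operations class (`_flush_initial` is the helper rewritten above; polling knobs are elided):

```python
from azure.core.polling import AsyncLROPoller
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling


async def begin_flush_sketch(self, resource_group_name, cache_name, storage_target_name, **kwargs):
    raw_result = await self._flush_initial(
        resource_group_name=resource_group_name,
        cache_name=cache_name,
        storage_target_name=storage_target_name,
        cls=lambda x, y, z: x,  # keep the raw PipelineResponse for the poller
        **kwargs,
    )
    # Settle the streamed body before polling begins.
    await raw_result.http_response.read()

    def get_long_running_output(pipeline_response):
        return None  # flush has no response model

    polling_method = AsyncARMPolling(lro_options={"final-state-via": "azure-async-operation"})
    return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
```

Operations that do return bodies differ only in `get_long_running_output`, which now deserializes from `pipeline_response.http_response` rather than from the `PipelineResponse` wrapper.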
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_targets_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_targets_operations.py
index 0a1b4b28b08c..075f886b5af9 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_targets_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_storage_targets_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -17,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -30,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._storage_targets_operations import (
build_create_or_update_request,
build_delete_request,
@@ -39,8 +39,11 @@
build_list_by_cache_request,
build_restore_defaults_request,
)
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -64,10 +67,10 @@ def __init__(self, *args, **kwargs) -> None:
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
- async def _dns_refresh_initial( # pylint: disable=inconsistent-return-statements
+ async def _dns_refresh_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -79,7 +82,7 @@ async def _dns_refresh_initial( # pylint: disable=inconsistent-return-statement
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_dns_refresh_request(
resource_group_name=resource_group_name,
@@ -90,10 +93,10 @@ async def _dns_refresh_initial( # pylint: disable=inconsistent-return-statement
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -101,6 +104,10 @@ async def _dns_refresh_initial( # pylint: disable=inconsistent-return-statement
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -111,8 +118,12 @@ async def _dns_refresh_initial( # pylint: disable=inconsistent-return-statement
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_dns_refresh(
@@ -141,7 +152,7 @@ async def begin_dns_refresh(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._dns_refresh_initial( # type: ignore
+ raw_result = await self._dns_refresh_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -151,6 +162,7 @@ async def begin_dns_refresh(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -197,7 +209,7 @@ def list_by_cache(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.StorageTargetsResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -216,7 +228,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -232,7 +243,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -261,15 +271,15 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self,
resource_group_name: str,
cache_name: str,
storage_target_name: str,
force: Optional[str] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -281,7 +291,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_delete_request(
resource_group_name=resource_group_name,
@@ -293,10 +303,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -304,6 +314,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -314,8 +328,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -356,7 +374,7 @@ async def begin_delete(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -367,6 +385,7 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -406,7 +425,7 @@ async def get(
:rtype: ~azure.mgmt.storagecache.models.StorageTarget
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -429,7 +448,6 @@ async def get(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -443,7 +461,7 @@ async def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("StorageTarget", pipeline_response)
+ deserialized = self._deserialize("StorageTarget", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -457,8 +475,8 @@ async def _create_or_update_initial(
storage_target_name: str,
storagetarget: Union[_models.StorageTarget, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.StorageTarget]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -471,7 +489,7 @@ async def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.StorageTarget]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -493,10 +511,10 @@ async def _create_or_update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -504,15 +522,14 @@ async def _create_or_update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("StorageTarget", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("StorageTarget", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -638,10 +655,11 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("StorageTarget", pipeline_response)
+ deserialized = self._deserialize("StorageTarget", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -663,10 +681,10 @@ def get_long_running_output(pipeline_response):
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
- async def _restore_defaults_initial( # pylint: disable=inconsistent-return-statements
+ async def _restore_defaults_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -678,7 +696,7 @@ async def _restore_defaults_initial( # pylint: disable=inconsistent-return-stat
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
_request = build_restore_defaults_request(
resource_group_name=resource_group_name,
@@ -689,10 +707,10 @@ async def _restore_defaults_initial( # pylint: disable=inconsistent-return-stat
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -700,6 +718,10 @@ async def _restore_defaults_initial( # pylint: disable=inconsistent-return-stat
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -710,8 +732,12 @@ async def _restore_defaults_initial( # pylint: disable=inconsistent-return-stat
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_restore_defaults(
@@ -740,7 +766,7 @@ async def begin_restore_defaults(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._restore_defaults_initial( # type: ignore
+ raw_result = await self._restore_defaults_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -750,6 +776,7 @@ async def begin_restore_defaults(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
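
None of this changes the public calling convention: a poller is still awaited and then resolved. A hedged usage sketch against the aio client (subscription, group, cache, and target names are placeholders):

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storagecache.aio import StorageCacheManagementClient


async def main():
    async with DefaultAzureCredential() as credential:
        async with StorageCacheManagementClient(credential, "<subscription-id>") as client:
            poller = await client.storage_targets.begin_restore_defaults(
                resource_group_name="my-rg",
                cache_name="my-cache",
                storage_target_name="my-target",
            )
            await poller.result()  # drives Azure-AsyncOperation polling to completion


asyncio.run(main())
```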
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_usage_models_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_usage_models_operations.py
index 6d933fb61ff3..36f8b8608c5b 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_usage_models_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/aio/operations/_usage_models_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
@@ -19,17 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._usage_models_operations import build_list_request
-from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -67,7 +68,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.UsageModel"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.UsageModelsResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -84,7 +85,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -100,7 +100,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
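
The dropped `_convert_request` calls reflect that the builders now emit `azure.core.rest.HttpRequest` objects the pipeline accepts natively, so the only remaining pre-send step is URL formatting. A minimal sketch under those assumptions (the path and api-version value are illustrative; `client` stands in for the generated client's internal `AsyncPipelineClient`):

```python
from azure.core import AsyncPipelineClient
from azure.core.rest import HttpRequest


async def list_operations(client: AsyncPipelineClient):
    request = HttpRequest(
        "GET", "/providers/Microsoft.StorageCache/operations?api-version=2024-03-01"
    )
    # format_url resolves the path against the client's base URL; no
    # transport-request conversion step is needed anymore.
    request.url = client.format_url(request.url)
    return await client.send_request(request)
```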
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/__init__.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/__init__.py
index b70d47c756b8..4bf2f6dcfce7 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/__init__.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/__init__.py
@@ -5,120 +5,138 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._models_py3 import AmlFilesystem
-from ._models_py3 import AmlFilesystemArchive
-from ._models_py3 import AmlFilesystemArchiveInfo
-from ._models_py3 import AmlFilesystemArchiveStatus
-from ._models_py3 import AmlFilesystemCheckSubnetError
-from ._models_py3 import AmlFilesystemCheckSubnetErrorFilesystemSubnet
-from ._models_py3 import AmlFilesystemClientInfo
-from ._models_py3 import AmlFilesystemContainerStorageInterface
-from ._models_py3 import AmlFilesystemEncryptionSettings
-from ._models_py3 import AmlFilesystemHealth
-from ._models_py3 import AmlFilesystemHsmSettings
-from ._models_py3 import AmlFilesystemIdentity
-from ._models_py3 import AmlFilesystemPropertiesHsm
-from ._models_py3 import AmlFilesystemPropertiesMaintenanceWindow
-from ._models_py3 import AmlFilesystemRootSquashSettings
-from ._models_py3 import AmlFilesystemSubnetInfo
-from ._models_py3 import AmlFilesystemUpdate
-from ._models_py3 import AmlFilesystemUpdatePropertiesMaintenanceWindow
-from ._models_py3 import AmlFilesystemsListResult
-from ._models_py3 import ApiOperation
-from ._models_py3 import ApiOperationDisplay
-from ._models_py3 import ApiOperationListResult
-from ._models_py3 import ApiOperationPropertiesServiceSpecification
-from ._models_py3 import AscOperation
-from ._models_py3 import AscOperationErrorResponse
-from ._models_py3 import BlobNfsTarget
-from ._models_py3 import Cache
-from ._models_py3 import CacheActiveDirectorySettings
-from ._models_py3 import CacheActiveDirectorySettingsCredentials
-from ._models_py3 import CacheDirectorySettings
-from ._models_py3 import CacheEncryptionSettings
-from ._models_py3 import CacheHealth
-from ._models_py3 import CacheIdentity
-from ._models_py3 import CacheNetworkSettings
-from ._models_py3 import CacheSecuritySettings
-from ._models_py3 import CacheSku
-from ._models_py3 import CacheUpgradeSettings
-from ._models_py3 import CacheUpgradeStatus
-from ._models_py3 import CacheUsernameDownloadSettings
-from ._models_py3 import CacheUsernameDownloadSettingsCredentials
-from ._models_py3 import CachesListResult
-from ._models_py3 import ClfsTarget
-from ._models_py3 import CloudErrorBody
-from ._models_py3 import Condition
-from ._models_py3 import ErrorAdditionalInfo
-from ._models_py3 import ErrorDetail
-from ._models_py3 import ErrorResponse
-from ._models_py3 import ImportJob
-from ._models_py3 import ImportJobUpdate
-from ._models_py3 import ImportJobsListResult
-from ._models_py3 import KeyVaultKeyReference
-from ._models_py3 import KeyVaultKeyReferenceSourceVault
-from ._models_py3 import LogSpecification
-from ._models_py3 import MetricDimension
-from ._models_py3 import MetricSpecification
-from ._models_py3 import NamespaceJunction
-from ._models_py3 import Nfs3Target
-from ._models_py3 import NfsAccessPolicy
-from ._models_py3 import NfsAccessRule
-from ._models_py3 import PrimingJob
-from ._models_py3 import PrimingJobIdParameter
-from ._models_py3 import RequiredAmlFilesystemSubnetsSize
-from ._models_py3 import RequiredAmlFilesystemSubnetsSizeInfo
-from ._models_py3 import Resource
-from ._models_py3 import ResourceSku
-from ._models_py3 import ResourceSkuCapabilities
-from ._models_py3 import ResourceSkuLocationInfo
-from ._models_py3 import ResourceSkusResult
-from ._models_py3 import ResourceUsage
-from ._models_py3 import ResourceUsageName
-from ._models_py3 import ResourceUsagesListResult
-from ._models_py3 import Restriction
-from ._models_py3 import SkuName
-from ._models_py3 import StorageTarget
-from ._models_py3 import StorageTargetResource
-from ._models_py3 import StorageTargetSpaceAllocation
-from ._models_py3 import StorageTargetsResult
-from ._models_py3 import SystemData
-from ._models_py3 import TrackedResource
-from ._models_py3 import UnknownTarget
-from ._models_py3 import UsageModel
-from ._models_py3 import UsageModelDisplay
-from ._models_py3 import UsageModelsResult
-from ._models_py3 import UserAssignedIdentitiesValue
-from ._models_py3 import UserAssignedIdentitiesValueAutoGenerated
+from typing import TYPE_CHECKING
-from ._storage_cache_management_client_enums import AmlFilesystemHealthStateType
-from ._storage_cache_management_client_enums import AmlFilesystemIdentityType
-from ._storage_cache_management_client_enums import AmlFilesystemProvisioningStateType
-from ._storage_cache_management_client_enums import AmlFilesystemSquashMode
-from ._storage_cache_management_client_enums import ArchiveStatusType
-from ._storage_cache_management_client_enums import CacheIdentityType
-from ._storage_cache_management_client_enums import ConflictResolutionMode
-from ._storage_cache_management_client_enums import CreatedByType
-from ._storage_cache_management_client_enums import DomainJoinedType
-from ._storage_cache_management_client_enums import FilesystemSubnetStatusType
-from ._storage_cache_management_client_enums import FirmwareStatusType
-from ._storage_cache_management_client_enums import HealthStateType
-from ._storage_cache_management_client_enums import ImportJobProvisioningStateType
-from ._storage_cache_management_client_enums import ImportStatusType
-from ._storage_cache_management_client_enums import MaintenanceDayOfWeekType
-from ._storage_cache_management_client_enums import MetricAggregationType
-from ._storage_cache_management_client_enums import NfsAccessRuleAccess
-from ._storage_cache_management_client_enums import NfsAccessRuleScope
-from ._storage_cache_management_client_enums import OperationalStateType
-from ._storage_cache_management_client_enums import PrimingJobState
-from ._storage_cache_management_client_enums import ProvisioningStateType
-from ._storage_cache_management_client_enums import ReasonCode
-from ._storage_cache_management_client_enums import StorageTargetType
-from ._storage_cache_management_client_enums import UsernameDownloadedType
-from ._storage_cache_management_client_enums import UsernameSource
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models_py3 import ( # type: ignore
+ AmlFilesystem,
+ AmlFilesystemArchive,
+ AmlFilesystemArchiveInfo,
+ AmlFilesystemArchiveStatus,
+ AmlFilesystemCheckSubnetError,
+ AmlFilesystemCheckSubnetErrorFilesystemSubnet,
+ AmlFilesystemClientInfo,
+ AmlFilesystemContainerStorageInterface,
+ AmlFilesystemEncryptionSettings,
+ AmlFilesystemHealth,
+ AmlFilesystemHsmSettings,
+ AmlFilesystemIdentity,
+ AmlFilesystemPropertiesHsm,
+ AmlFilesystemPropertiesMaintenanceWindow,
+ AmlFilesystemRootSquashSettings,
+ AmlFilesystemSubnetInfo,
+ AmlFilesystemUpdate,
+ AmlFilesystemUpdatePropertiesMaintenanceWindow,
+ AmlFilesystemsListResult,
+ ApiOperation,
+ ApiOperationDisplay,
+ ApiOperationListResult,
+ ApiOperationPropertiesServiceSpecification,
+ AscOperation,
+ AscOperationErrorResponse,
+ AutoExportJob,
+ AutoExportJobUpdate,
+ AutoExportJobsListResult,
+ BlobNfsTarget,
+ Cache,
+ CacheActiveDirectorySettings,
+ CacheActiveDirectorySettingsCredentials,
+ CacheDirectorySettings,
+ CacheEncryptionSettings,
+ CacheHealth,
+ CacheIdentity,
+ CacheNetworkSettings,
+ CacheSecuritySettings,
+ CacheSku,
+ CacheUpgradeSettings,
+ CacheUpgradeStatus,
+ CacheUsernameDownloadSettings,
+ CacheUsernameDownloadSettingsCredentials,
+ CachesListResult,
+ ClfsTarget,
+ CloudErrorBody,
+ Condition,
+ ErrorAdditionalInfo,
+ ErrorDetail,
+ ErrorResponse,
+ ImportJob,
+ ImportJobUpdate,
+ ImportJobsListResult,
+ KeyVaultKeyReference,
+ KeyVaultKeyReferenceSourceVault,
+ LogSpecification,
+ MetricDimension,
+ MetricSpecification,
+ NamespaceJunction,
+ Nfs3Target,
+ NfsAccessPolicy,
+ NfsAccessRule,
+ PrimingJob,
+ PrimingJobIdParameter,
+ RequiredAmlFilesystemSubnetsSize,
+ RequiredAmlFilesystemSubnetsSizeInfo,
+ Resource,
+ ResourceSku,
+ ResourceSkuCapabilities,
+ ResourceSkuLocationInfo,
+ ResourceSkusResult,
+ ResourceUsage,
+ ResourceUsageName,
+ ResourceUsagesListResult,
+ Restriction,
+ SkuName,
+ StorageTarget,
+ StorageTargetResource,
+ StorageTargetSpaceAllocation,
+ StorageTargetsResult,
+ SystemData,
+ TrackedResource,
+ UnknownTarget,
+ UsageModel,
+ UsageModelDisplay,
+ UsageModelsResult,
+ UserAssignedIdentitiesValue,
+ UserAssignedIdentitiesValueAutoGenerated,
+)
+
+from ._storage_cache_management_client_enums import ( # type: ignore
+ AmlFilesystemHealthStateType,
+ AmlFilesystemIdentityType,
+ AmlFilesystemProvisioningStateType,
+ AmlFilesystemSquashMode,
+ ArchiveStatusType,
+ AutoExportJobAdminStatus,
+ AutoExportJobProvisioningStateType,
+ AutoExportStatusType,
+ CacheIdentityType,
+ ConflictResolutionMode,
+ CreatedByType,
+ DomainJoinedType,
+ FilesystemSubnetStatusType,
+ FirmwareStatusType,
+ HealthStateType,
+ ImportJobAdminStatus,
+ ImportJobProvisioningStateType,
+ ImportStatusType,
+ MaintenanceDayOfWeekType,
+ MetricAggregationType,
+ NfsAccessRuleAccess,
+ NfsAccessRuleScope,
+ OperationalStateType,
+ PrimingJobState,
+ ProvisioningStateType,
+ ReasonCode,
+ StorageTargetType,
+ UsernameDownloadedType,
+ UsernameSource,
+)
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -147,6 +165,9 @@
"ApiOperationPropertiesServiceSpecification",
"AscOperation",
"AscOperationErrorResponse",
+ "AutoExportJob",
+ "AutoExportJobUpdate",
+ "AutoExportJobsListResult",
"BlobNfsTarget",
"Cache",
"CacheActiveDirectorySettings",
@@ -212,6 +233,9 @@
"AmlFilesystemProvisioningStateType",
"AmlFilesystemSquashMode",
"ArchiveStatusType",
+ "AutoExportJobAdminStatus",
+ "AutoExportJobProvisioningStateType",
+ "AutoExportStatusType",
"CacheIdentityType",
"ConflictResolutionMode",
"CreatedByType",
@@ -219,6 +243,7 @@
"FilesystemSubnetStatusType",
"FirmwareStatusType",
"HealthStateType",
+ "ImportJobAdminStatus",
"ImportJobProvisioningStateType",
"ImportStatusType",
"MaintenanceDayOfWeekType",
@@ -233,5 +258,5 @@
"UsernameDownloadedType",
"UsernameSource",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
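The regenerated models package above now re-exports the three auto export job types. A minimal import sketch, using only names that appear in the __all__ list of this change:

    from azure.mgmt.storagecache.models import (
        AutoExportJob,             # tracked resource describing the export job
        AutoExportJobUpdate,       # tags-only update (PATCH) body
        AutoExportJobsListResult,  # paged envelope with next_link and value
    )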
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/_models_py3.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/_models_py3.py
index 6ef2137f221c..4566f996e469 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/_models_py3.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/_models_py3.py
@@ -1,5 +1,5 @@
-# coding=utf-8
# pylint: disable=too-many-lines
+# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
@@ -16,10 +16,9 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
@@ -119,7 +118,7 @@ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kw
self.location = location
-class AmlFilesystem(TrackedResource): # pylint: disable=too-many-instance-attributes
+class AmlFilesystem(TrackedResource):
"""An AML file system instance. Follows Azure Resource Manager standards:
https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/resource-api-reference.md.
@@ -1265,6 +1264,240 @@ def __init__(self, *, code: Optional[str] = None, message: Optional[str] = None,
self.message = message
+class AutoExportJob(TrackedResource):
+ """An auto export job instance. Follows Azure Resource Manager standards:
+ https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/resource-api-reference.md.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to the server.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+ information.
+ :vartype system_data: ~azure.mgmt.storagecache.models.SystemData
+ :ivar tags: Resource tags.
+ :vartype tags: dict[str, str]
+ :ivar location: The geo-location where the resource lives. Required.
+ :vartype location: str
+ :ivar provisioning_state: ARM provisioning state. Known values are: "Succeeded", "Failed",
+ "Creating", "Deleting", "Updating", and "Canceled".
+ :vartype provisioning_state: str or
+ ~azure.mgmt.storagecache.models.AutoExportJobProvisioningStateType
+ :ivar admin_status: The administrative status of the auto export job. Possible values:
+ 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto
+ export job. By default it is set to 'Enable'. Known values are: "Active" and "Cancel".
+ :vartype admin_status: str or ~azure.mgmt.storagecache.models.AutoExportJobAdminStatus
+ :ivar auto_export_prefixes: An array of blob paths/prefixes that get auto exported to the
+ cluster namespace. It has '/' as the default value. The maximum number of allowed paths is
+ currently 1.
+ :vartype auto_export_prefixes: list[str]
+ :ivar state: The operational state of auto export. InProgress indicates the export is running.
+ Disabling indicates the user has requested to disable the export but the disabling is still in
+ progress. Disabled indicates auto export has been disabled. DisableFailed indicates the
+ disabling has failed. Failed means the export was unable to continue due to a fatal error.
+ Known values are: "InProgress", "Disabling", "Disabled", "DisableFailed", and "Failed".
+ :vartype state: str or ~azure.mgmt.storagecache.models.AutoExportStatusType
+ :ivar status_code: Server-defined status code for auto export job.
+ :vartype status_code: str
+ :ivar status_message: Server-defined status message for auto export job.
+ :vartype status_message: str
+ :ivar total_files_exported: Total files exported since the start of the export. This is
+ cumulative; some files may be counted repeatedly.
+ :vartype total_files_exported: int
+ :ivar total_mi_b_exported: Total data (in MiB) exported since the start of the export. This is
+ cumulative; some files may be counted repeatedly.
+ :vartype total_mi_b_exported: int
+ :ivar total_files_failed: Total files that failed to export since the last successfully
+ completed iteration. This is cumulative; some files may be counted repeatedly.
+ :vartype total_files_failed: int
+ :ivar export_iteration_count: Number of iterations completed since the start of the export.
+ :vartype export_iteration_count: int
+ :ivar last_successful_iteration_completion_time_utc: Time (in UTC) of the last successfully
+ completed export iteration. Look at logging container for details.
+ :vartype last_successful_iteration_completion_time_utc: ~datetime.datetime
+ :ivar current_iteration_files_discovered: Files discovered for export in the current
+ iteration. It may increase as more export items are found.
+ :vartype current_iteration_files_discovered: int
+ :ivar current_iteration_mi_b_discovered: Data (in MiB) discovered for export in the current
+ iteration. It may increase as more export items are found.
+ :vartype current_iteration_mi_b_discovered: int
+ :ivar current_iteration_files_exported: Files that have been exported in the current iteration.
+ :vartype current_iteration_files_exported: int
+ :ivar current_iteration_mi_b_exported: Data (in MiB) that has been exported in the current
+ iteration.
+ :vartype current_iteration_mi_b_exported: int
+ :ivar current_iteration_files_failed: Files that failed to export in the current iteration.
+ :vartype current_iteration_files_failed: int
+ :ivar last_started_time_utc: The time (in UTC) the latest auto export job started.
+ :vartype last_started_time_utc: ~datetime.datetime
+ :ivar last_completion_time_utc: The time (in UTC) of the last completed auto export job.
+ :vartype last_completion_time_utc: ~datetime.datetime
+ """
+
+ _validation = {
+ "id": {"readonly": True},
+ "name": {"readonly": True},
+ "type": {"readonly": True},
+ "system_data": {"readonly": True},
+ "location": {"required": True},
+ "provisioning_state": {"readonly": True},
+ "status_code": {"readonly": True},
+ "status_message": {"readonly": True},
+ "total_files_exported": {"readonly": True},
+ "total_mi_b_exported": {"readonly": True},
+ "total_files_failed": {"readonly": True},
+ "export_iteration_count": {"readonly": True},
+ "last_successful_iteration_completion_time_utc": {"readonly": True},
+ "current_iteration_files_discovered": {"readonly": True},
+ "current_iteration_mi_b_discovered": {"readonly": True},
+ "current_iteration_files_exported": {"readonly": True},
+ "current_iteration_mi_b_exported": {"readonly": True},
+ "current_iteration_files_failed": {"readonly": True},
+ "last_started_time_utc": {"readonly": True},
+ "last_completion_time_utc": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "str"},
+ "name": {"key": "name", "type": "str"},
+ "type": {"key": "type", "type": "str"},
+ "system_data": {"key": "systemData", "type": "SystemData"},
+ "tags": {"key": "tags", "type": "{str}"},
+ "location": {"key": "location", "type": "str"},
+ "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
+ "admin_status": {"key": "properties.adminStatus", "type": "str"},
+ "auto_export_prefixes": {"key": "properties.autoExportPrefixes", "type": "[str]"},
+ "state": {"key": "properties.status.state", "type": "str"},
+ "status_code": {"key": "properties.status.statusCode", "type": "str"},
+ "status_message": {"key": "properties.status.statusMessage", "type": "str"},
+ "total_files_exported": {"key": "properties.status.totalFilesExported", "type": "int"},
+ "total_mi_b_exported": {"key": "properties.status.totalMiBExported", "type": "int"},
+ "total_files_failed": {"key": "properties.status.totalFilesFailed", "type": "int"},
+ "export_iteration_count": {"key": "properties.status.exportIterationCount", "type": "int"},
+ "last_successful_iteration_completion_time_utc": {
+ "key": "properties.status.lastSuccessfulIterationCompletionTimeUTC",
+ "type": "iso-8601",
+ },
+ "current_iteration_files_discovered": {
+ "key": "properties.status.currentIterationFilesDiscovered",
+ "type": "int",
+ },
+ "current_iteration_mi_b_discovered": {"key": "properties.status.currentIterationMiBDiscovered", "type": "int"},
+ "current_iteration_files_exported": {"key": "properties.status.currentIterationFilesExported", "type": "int"},
+ "current_iteration_mi_b_exported": {"key": "properties.status.currentIterationMiBExported", "type": "int"},
+ "current_iteration_files_failed": {"key": "properties.status.currentIterationFilesFailed", "type": "int"},
+ "last_started_time_utc": {"key": "properties.status.lastStartedTimeUTC", "type": "iso-8601"},
+ "last_completion_time_utc": {"key": "properties.status.lastCompletionTimeUTC", "type": "iso-8601"},
+ }
+
+ def __init__( # pylint: disable=too-many-locals
+ self,
+ *,
+ location: str,
+ tags: Optional[Dict[str, str]] = None,
+ admin_status: Union[str, "_models.AutoExportJobAdminStatus"] = "Active",
+ auto_export_prefixes: Optional[List[str]] = None,
+ state: Optional[Union[str, "_models.AutoExportStatusType"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword tags: Resource tags.
+ :paramtype tags: dict[str, str]
+ :keyword location: The geo-location where the resource lives. Required.
+ :paramtype location: str
+ :keyword admin_status: The administrative status of the auto export job. Possible values:
+ 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto
+ export job. By default it is set to 'Enable'. Known values are: "Active" and "Cancel".
+ :paramtype admin_status: str or ~azure.mgmt.storagecache.models.AutoExportJobAdminStatus
+ :keyword auto_export_prefixes: An array of blob paths/prefixes that get auto exported to the
+ cluster namespace. It has '/' as the default value. The maximum number of allowed paths is
+ currently 1.
+ :paramtype auto_export_prefixes: list[str]
+ :keyword state: The operational state of auto export. InProgress indicates the export is
+ running. Disabling indicates the user has requested to disable the export but the disabling is
+ still in progress. Disabled indicates auto export has been disabled. DisableFailed indicates
+ the disabling has failed. Failed means the export was unable to continue due to a fatal
+ error. Known values are: "InProgress", "Disabling", "Disabled", "DisableFailed", and "Failed".
+ :paramtype state: str or ~azure.mgmt.storagecache.models.AutoExportStatusType
+ """
+ super().__init__(tags=tags, location=location, **kwargs)
+ self.provisioning_state = None
+ self.admin_status = admin_status
+ self.auto_export_prefixes = auto_export_prefixes
+ self.state = state
+ self.status_code = None
+ self.status_message = None
+ self.total_files_exported = None
+ self.total_mi_b_exported = None
+ self.total_files_failed = None
+ self.export_iteration_count = None
+ self.last_successful_iteration_completion_time_utc = None
+ self.current_iteration_files_discovered = None
+ self.current_iteration_mi_b_discovered = None
+ self.current_iteration_files_exported = None
+ self.current_iteration_mi_b_exported = None
+ self.current_iteration_files_failed = None
+ self.last_started_time_utc = None
+ self.last_completion_time_utc = None
+
+
+class AutoExportJobsListResult(_serialization.Model):
+ """Result of the request to list auto export jobs. It contains a list of auto export jobs and a
+ URL to get the next set of results.
+
+ :ivar next_link: URL to get the next set of auto export job list results, if there are any.
+ :vartype next_link: str
+ :ivar value: List of auto export jobs.
+ :vartype value: list[~azure.mgmt.storagecache.models.AutoExportJob]
+ """
+
+ _attribute_map = {
+ "next_link": {"key": "nextLink", "type": "str"},
+ "value": {"key": "value", "type": "[AutoExportJob]"},
+ }
+
+ def __init__(
+ self, *, next_link: Optional[str] = None, value: Optional[List["_models.AutoExportJob"]] = None, **kwargs: Any
+ ) -> None:
+ """
+ :keyword next_link: URL to get the next set of auto export job list results, if there are any.
+ :paramtype next_link: str
+ :keyword value: List of auto export jobs.
+ :paramtype value: list[~azure.mgmt.storagecache.models.AutoExportJob]
+ """
+ super().__init__(**kwargs)
+ self.next_link = next_link
+ self.value = value
+
+
+class AutoExportJobUpdate(_serialization.Model):
+ """An auto export job update instance.
+
+ :ivar tags: Resource tags.
+ :vartype tags: dict[str, str]
+ """
+
+ _attribute_map = {
+ "tags": {"key": "tags", "type": "{str}"},
+ }
+
+ def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
+ """
+ :keyword tags: Resource tags.
+ :paramtype tags: dict[str, str]
+ """
+ super().__init__(**kwargs)
+ self.tags = tags
+
+
class BlobNfsTarget(_serialization.Model):
"""Properties pertaining to the BlobNfsTarget.
@@ -1316,7 +1549,7 @@ def __init__(
self.write_back_timer = write_back_timer
-class Cache(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class Cache(_serialization.Model):
"""A cache instance. Follows Azure Resource Manager standards:
https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/resource-api-reference.md.
@@ -2005,7 +2238,7 @@ def __init__(self, **kwargs: Any) -> None:
self.pending_firmware_version = None
-class CacheUsernameDownloadSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class CacheUsernameDownloadSettings(_serialization.Model):
"""Settings for Extended Groups username and group download.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2346,7 +2579,7 @@ def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: A
self.error = error
-class ImportJob(TrackedResource): # pylint: disable=too-many-instance-attributes
+class ImportJob(TrackedResource):
"""An import job instance. Follows Azure Resource Manager standards:
https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/resource-api-reference.md.
@@ -2373,6 +2606,10 @@ class ImportJob(TrackedResource): # pylint: disable=too-many-instance-attribute
"Creating", "Deleting", "Updating", and "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.storagecache.models.ImportJobProvisioningStateType
+ :ivar admin_status: The administrative status of the import job. Possible values: 'Enable',
+ 'Disable'. Passing in a value of 'Disable' will cancel the current active import job. By
+ default it is set to 'Enable'. Known values are: "Active" and "Cancel".
+ :vartype admin_status: str or ~azure.mgmt.storagecache.models.ImportJobAdminStatus
:ivar import_prefixes: An array of blob paths/prefixes that get imported into the cluster
namespace. It has '/' as the default value.
:vartype import_prefixes: list[str]
@@ -2391,13 +2628,13 @@ class ImportJob(TrackedResource): # pylint: disable=too-many-instance-attribute
:ivar maximum_errors: Total non-conflict oriented errors the import job will tolerate before
exiting with failure. -1 means infinite. 0 means exit immediately and is the default.
:vartype maximum_errors: int
- :ivar state: The state of the import job. InProgress indicates the import is still running.
- Canceled indicates it has been canceled by the user. Completed indicates import finished,
- successfully importing all discovered blobs into the Lustre namespace. CompletedPartial
- indicates the import finished but some blobs either were found to be conflicting and could not
- be imported or other errors were encountered. Failed means the import was unable to complete
- due to a fatal error. Known values are: "InProgress", "Cancelling", "Canceled", "Completed",
- "CompletedPartial", and "Failed".
+ :ivar state: The operational state of the import job. InProgress indicates the import is still
+ running. Canceled indicates it has been canceled by the user. Completed indicates import
+ finished, successfully importing all discovered blobs into the Lustre namespace.
+ CompletedPartial indicates the import finished but some blobs either were found to be
+ conflicting and could not be imported or other errors were encountered. Failed means the import
+ was unable to complete due to a fatal error. Known values are: "InProgress", "Cancelling",
+ "Canceled", "Completed", "CompletedPartial", and "Failed".
:vartype state: str or ~azure.mgmt.storagecache.models.ImportStatusType
:ivar status_message: The status message of the import job.
:vartype status_message: str
@@ -2407,12 +2644,27 @@ class ImportJob(TrackedResource): # pylint: disable=too-many-instance-attribute
:vartype blobs_walked_per_second: int
:ivar total_blobs_imported: The total blobs that have been imported since import began.
:vartype total_blobs_imported: int
+ :ivar imported_files: New or modified files that have been imported into the filesystem.
+ :vartype imported_files: int
+ :ivar imported_directories: New or modified directories that have been imported into the
+ filesystem.
+ :vartype imported_directories: int
+ :ivar imported_symlinks: Symbolic links newly added to the filesystem.
+ :vartype imported_symlinks: int
+ :ivar preexisting_files: Files that already exist in the filesystem and have not been modified.
+ :vartype preexisting_files: int
+ :ivar preexisting_directories: Directories that already exist in the filesystem and have not
+ been modified.
+ :vartype preexisting_directories: int
+ :ivar preexisting_symlinks: Symbolic links that already exist in the filesystem and have not
+ been modified.
+ :vartype preexisting_symlinks: int
:ivar blobs_imported_per_second: A recent and frequently updated rate of total files,
directories, and symlinks imported per second.
:vartype blobs_imported_per_second: int
- :ivar last_completion_time: The time of the last completed archive operation.
+ :ivar last_completion_time: The time (in UTC) of the last completed import job.
:vartype last_completion_time: ~datetime.datetime
- :ivar last_started_time: The time the latest archive operation started.
+ :ivar last_started_time: The time (in UTC) the latest import job started.
:vartype last_started_time: ~datetime.datetime
:ivar total_errors: Number of errors in the import job.
:vartype total_errors: int
@@ -2432,6 +2684,12 @@ class ImportJob(TrackedResource): # pylint: disable=too-many-instance-attribute
"total_blobs_walked": {"readonly": True},
"blobs_walked_per_second": {"readonly": True},
"total_blobs_imported": {"readonly": True},
+ "imported_files": {"readonly": True},
+ "imported_directories": {"readonly": True},
+ "imported_symlinks": {"readonly": True},
+ "preexisting_files": {"readonly": True},
+ "preexisting_directories": {"readonly": True},
+ "preexisting_symlinks": {"readonly": True},
"blobs_imported_per_second": {"readonly": True},
"last_completion_time": {"readonly": True},
"last_started_time": {"readonly": True},
@@ -2447,6 +2705,7 @@ class ImportJob(TrackedResource): # pylint: disable=too-many-instance-attribute
"tags": {"key": "tags", "type": "{str}"},
"location": {"key": "location", "type": "str"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
+ "admin_status": {"key": "properties.adminStatus", "type": "str"},
"import_prefixes": {"key": "properties.importPrefixes", "type": "[str]"},
"conflict_resolution_mode": {"key": "properties.conflictResolutionMode", "type": "str"},
"maximum_errors": {"key": "properties.maximumErrors", "type": "int"},
@@ -2455,6 +2714,12 @@ class ImportJob(TrackedResource): # pylint: disable=too-many-instance-attribute
"total_blobs_walked": {"key": "properties.status.totalBlobsWalked", "type": "int"},
"blobs_walked_per_second": {"key": "properties.status.blobsWalkedPerSecond", "type": "int"},
"total_blobs_imported": {"key": "properties.status.totalBlobsImported", "type": "int"},
+ "imported_files": {"key": "properties.status.importedFiles", "type": "int"},
+ "imported_directories": {"key": "properties.status.importedDirectories", "type": "int"},
+ "imported_symlinks": {"key": "properties.status.importedSymlinks", "type": "int"},
+ "preexisting_files": {"key": "properties.status.preexistingFiles", "type": "int"},
+ "preexisting_directories": {"key": "properties.status.preexistingDirectories", "type": "int"},
+ "preexisting_symlinks": {"key": "properties.status.preexistingSymlinks", "type": "int"},
"blobs_imported_per_second": {"key": "properties.status.blobsImportedPerSecond", "type": "int"},
"last_completion_time": {"key": "properties.status.lastCompletionTime", "type": "iso-8601"},
"last_started_time": {"key": "properties.status.lastStartedTime", "type": "iso-8601"},
@@ -2462,11 +2727,12 @@ class ImportJob(TrackedResource): # pylint: disable=too-many-instance-attribute
"total_conflicts": {"key": "properties.status.totalConflicts", "type": "int"},
}
- def __init__(
+ def __init__( # pylint: disable=too-many-locals
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
+ admin_status: Union[str, "_models.ImportJobAdminStatus"] = "Active",
import_prefixes: Optional[List[str]] = None,
conflict_resolution_mode: Union[str, "_models.ConflictResolutionMode"] = "Fail",
maximum_errors: int = 0,
@@ -2477,6 +2743,10 @@ def __init__(
:paramtype tags: dict[str, str]
:keyword location: The geo-location where the resource lives. Required.
:paramtype location: str
+ :keyword admin_status: The administrative status of the import job. Possible values: 'Enable',
+ 'Disable'. Passing in a value of 'Disable' will cancel the current active import job. By
+ default it is set to 'Enable'. Known values are: "Active" and "Cancel".
+ :paramtype admin_status: str or ~azure.mgmt.storagecache.models.ImportJobAdminStatus
:keyword import_prefixes: An array of blob paths/prefixes that get imported into the cluster
namespace. It has '/' as the default value.
:paramtype import_prefixes: list[str]
@@ -2498,6 +2768,7 @@ def __init__(
"""
super().__init__(tags=tags, location=location, **kwargs)
self.provisioning_state = None
+ self.admin_status = admin_status
self.import_prefixes = import_prefixes
self.conflict_resolution_mode = conflict_resolution_mode
self.maximum_errors = maximum_errors
@@ -2506,6 +2777,12 @@ def __init__(
self.total_blobs_walked = None
self.blobs_walked_per_second = None
self.total_blobs_imported = None
+ self.imported_files = None
+ self.imported_directories = None
+ self.imported_symlinks = None
+ self.preexisting_files = None
+ self.preexisting_directories = None
+ self.preexisting_symlinks = None
self.blobs_imported_per_second = None
self.last_completion_time = None
self.last_started_time = None
@@ -3488,7 +3765,7 @@ def __init__(self, **kwargs: Any) -> None:
self.system_data = None
-class StorageTarget(StorageTargetResource): # pylint: disable=too-many-instance-attributes
+class StorageTarget(StorageTargetResource):
"""Type of the Storage Target.
Variables are only populated by the server, and will be ignored when sending a request.
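Per the AutoExportJob constructor added above, location is the only required argument; admin_status defaults to "Active", and every status field is a server-populated, read-only attribute initialized to None. A minimal construction sketch (region, tags, and prefix values are illustrative):

    from azure.mgmt.storagecache.models import AutoExportJob

    job = AutoExportJob(
        location="eastus",           # illustrative region
        tags={"env": "test"},        # illustrative tags
        auto_export_prefixes=["/"],  # '/' is the documented default; only one path is allowed today
    )
    assert job.admin_status == "Active"      # constructor default
    assert job.total_files_exported is None  # read-only; populated by the server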
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/_storage_cache_management_client_enums.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/_storage_cache_management_client_enums.py
index 96d4fa3436a2..620d4286ea4b 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/_storage_cache_management_client_enums.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/models/_storage_cache_management_client_enums.py
@@ -63,6 +63,41 @@ class ArchiveStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
FS_SCAN_IN_PROGRESS = "FSScanInProgress"
+class AutoExportJobAdminStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The administrative status of the auto export job. Possible values: 'Enable', 'Disable'. Passing
+ in a value of 'Disable' will disable the current active auto export job. By default it is set
+ to 'Enable'.
+ """
+
+ ACTIVE = "Active"
+ CANCEL = "Cancel"
+
+
+class AutoExportJobProvisioningStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """ARM provisioning state."""
+
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ UPDATING = "Updating"
+ CANCELED = "Canceled"
+
+
+class AutoExportStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The operational state of auto export. InProgress indicates the export is running. Disabling
+ indicates the user has requested to disable the export but the disabling is still in progress.
+ Disabled indicates auto export has been disabled. DisableFailed indicates the disabling has
+ failed. Failed means the export was unable to continue due to a fatal error.
+ """
+
+ IN_PROGRESS = "InProgress"
+ DISABLING = "Disabling"
+ DISABLED = "Disabled"
+ DISABLE_FAILED = "DisableFailed"
+ FAILED = "Failed"
+
+
class CacheIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The type of identity used for the cache."""
@@ -145,6 +180,15 @@ class HealthStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
UPGRADE_FAILED = "UpgradeFailed"
+class ImportJobAdminStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The administrative status of the import job. Possible values: 'Enable', 'Disable'. Passing in a
+ value of 'Disable' will cancel the current active import job. By default it is set to 'Enable'.
+ """
+
+ ACTIVE = "Active"
+ CANCEL = "Cancel"
+
+
class ImportJobProvisioningStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""ARM provisioning state."""
@@ -157,11 +201,12 @@ class ImportJobProvisioningStateType(str, Enum, metaclass=CaseInsensitiveEnumMet
class ImportStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
- """The state of the import job. InProgress indicates the import is still running. Canceled
- indicates it has been canceled by the user. Completed indicates import finished, successfully
- importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import
- finished but some blobs either were found to be conflicting and could not be imported or other
- errors were encountered. Failed means the import was unable to complete due to a fatal error.
+ """The operational state of the import job. InProgress indicates the import is still running.
+ Canceled indicates it has been canceled by the user. Completed indicates import finished,
+ successfully importing all discovered blobs into the Lustre namespace. CompletedPartial
+ indicates the import finished but some blobs either were found to be conflicting and could not
+ be imported or other errors were encountered. Failed means the import was unable to complete
+ due to a fatal error.
"""
IN_PROGRESS = "InProgress"
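All of the new enums are declared with azure.core's CaseInsensitiveEnumMeta and subclass str, so member lookup by name ignores case and members compare equal to their wire values. A small sketch, assuming the standard azure.core metaclass semantics:

    from azure.mgmt.storagecache.models import AutoExportJobAdminStatus

    assert AutoExportJobAdminStatus.cancel is AutoExportJobAdminStatus.CANCEL  # attribute lookup ignores case
    assert AutoExportJobAdminStatus.CANCEL == "Cancel"                         # str-valued member equals its wire value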
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/__init__.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/__init__.py
index 4ada12103750..321526fcb39e 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/__init__.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/__init__.py
@@ -5,25 +5,33 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._aml_filesystems_operations import AmlFilesystemsOperations
-from ._import_jobs_operations import ImportJobsOperations
-from ._storage_cache_management_client_operations import StorageCacheManagementClientOperationsMixin
-from ._operations import Operations
-from ._skus_operations import SkusOperations
-from ._usage_models_operations import UsageModelsOperations
-from ._asc_operations_operations import AscOperationsOperations
-from ._asc_usages_operations import AscUsagesOperations
-from ._caches_operations import CachesOperations
-from ._storage_targets_operations import StorageTargetsOperations
-from ._storage_target_operations import StorageTargetOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._aml_filesystems_operations import AmlFilesystemsOperations # type: ignore
+from ._auto_export_jobs_operations import AutoExportJobsOperations # type: ignore
+from ._import_jobs_operations import ImportJobsOperations # type: ignore
+from ._storage_cache_management_client_operations import StorageCacheManagementClientOperationsMixin # type: ignore
+from ._operations import Operations # type: ignore
+from ._skus_operations import SkusOperations # type: ignore
+from ._usage_models_operations import UsageModelsOperations # type: ignore
+from ._asc_operations_operations import AscOperationsOperations # type: ignore
+from ._asc_usages_operations import AscUsagesOperations # type: ignore
+from ._caches_operations import CachesOperations # type: ignore
+from ._storage_targets_operations import StorageTargetsOperations # type: ignore
+from ._storage_target_operations import StorageTargetOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"AmlFilesystemsOperations",
+ "AutoExportJobsOperations",
"ImportJobsOperations",
"StorageCacheManagementClientOperationsMixin",
"Operations",
@@ -35,5 +43,5 @@
"StorageTargetsOperations",
"StorageTargetOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
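The new operation group is exported here; the client wiring happens elsewhere in this change. A client-side sketch, where the auto_export_jobs attribute name is an assumption based on the generator's snake_case convention for AutoExportJobsOperations:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storagecache import StorageCacheManagementClient

    client = StorageCacheManagementClient(DefaultAzureCredential(), "<subscription-id>")
    ops = client.auto_export_jobs  # assumed attribute name for the new operation group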
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_aml_filesystems_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_aml_filesystems_operations.py
index ad4b25928393..1b63ddb620b0 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_aml_filesystems_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_aml_filesystems_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +7,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,8 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -43,7 +48,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -67,7 +72,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -99,7 +104,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -139,7 +144,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -179,7 +184,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -222,7 +227,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -265,7 +270,7 @@ def build_archive_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -308,7 +313,7 @@ def build_cancel_archive_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -375,7 +380,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.AmlFilesystem"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AmlFilesystemsListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -392,7 +397,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -408,7 +412,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -454,7 +457,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AmlFilesystemsListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -472,7 +475,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -488,7 +490,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -517,10 +518,8 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- def _delete_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _delete_initial(self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -532,7 +531,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_delete_request(
resource_group_name=resource_group_name,
@@ -542,10 +541,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -553,6 +552,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -563,8 +566,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -589,7 +596,7 @@ def begin_delete(self, resource_group_name: str, aml_filesystem_name: str, **kwa
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
aml_filesystem_name=aml_filesystem_name,
api_version=api_version,
@@ -598,6 +605,7 @@ def begin_delete(self, resource_group_name: str, aml_filesystem_name: str, **kwa
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -635,7 +643,7 @@ def get(self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any)
:rtype: ~azure.mgmt.storagecache.models.AmlFilesystem
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -657,7 +665,6 @@ def get(self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any)
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -671,7 +678,7 @@ def get(self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
+ deserialized = self._deserialize("AmlFilesystem", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -684,8 +691,8 @@ def _create_or_update_initial(
aml_filesystem_name: str,
aml_filesystem: Union[_models.AmlFilesystem, IO[bytes]],
**kwargs: Any
- ) -> _models.AmlFilesystem:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -698,7 +705,7 @@ def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.AmlFilesystem] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -719,10 +726,10 @@ def _create_or_update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -730,19 +737,20 @@ def _create_or_update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
-
if response.status_code == 201:
response_headers["azure-async-operation"] = self._deserialize(
"str", response.headers.get("azure-async-operation")
)
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -857,10 +865,11 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
+ deserialized = self._deserialize("AmlFilesystem", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -890,8 +899,8 @@ def _update_initial(
aml_filesystem_name: str,
aml_filesystem: Union[_models.AmlFilesystemUpdate, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.AmlFilesystem]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -904,7 +913,7 @@ def _update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.AmlFilesystem]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -925,10 +934,10 @@ def _update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -936,20 +945,22 @@ def _update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
-
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["azure-async-operation"] = self._deserialize(
"str", response.headers.get("azure-async-operation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1063,10 +1074,11 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("AmlFilesystem", pipeline_response)
+ deserialized = self._deserialize("AmlFilesystem", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -1091,7 +1103,7 @@ def get_long_running_output(pipeline_response):
)
@overload
- def archive( # pylint: disable=inconsistent-return-statements
+ def archive(
self,
resource_group_name: str,
aml_filesystem_name: str,
@@ -1119,7 +1131,7 @@ def archive( # pylint: disable=inconsistent-return-statements
"""
@overload
- def archive( # pylint: disable=inconsistent-return-statements
+ def archive(
self,
resource_group_name: str,
aml_filesystem_name: str,
@@ -1169,7 +1181,7 @@ def archive( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1206,7 +1218,6 @@ def archive( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -1239,7 +1250,7 @@ def cancel_archive( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1261,7 +1272,6 @@ def cancel_archive( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
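The net effect of the hunks above: each _*_initial helper now streams the raw response (returning Iterator[bytes]) and the begin_* wrapper reads the body eagerly via raw_result.http_response.read() before handing off to the poller, so the public LRO surface is unchanged. A caller-side sketch (resource names are illustrative, client as constructed earlier):

    poller = client.aml_filesystems.begin_delete("my-resource-group", "my-fs")
    poller.result()  # blocks until the delete LRO completes; returns None on success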
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_asc_operations_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_asc_operations_operations.py
index 1b5c8abb97d8..5db92361b80a 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_asc_operations_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_asc_operations_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
@@ -17,16 +17,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -38,7 +40,7 @@ def build_get_request(location: str, operation_id: str, subscription_id: str, **
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -94,7 +96,7 @@ def get(self, location: str, operation_id: str, **kwargs: Any) -> _models.AscOpe
:rtype: ~azure.mgmt.storagecache.models.AscOperation
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -116,7 +118,6 @@ def get(self, location: str, operation_id: str, **kwargs: Any) -> _models.AscOpe
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -130,7 +131,7 @@ def get(self, location: str, operation_id: str, **kwargs: Any) -> _models.AscOpe
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("AscOperation", pipeline_response)
+ deserialized = self._deserialize("AscOperation", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
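
The import hunks above capture two mechanical migrations repeated in every module: request/response types now come from `azure.core.rest` (so the `_convert_request` bridge to the legacy transport layer disappears), and `MutableMapping` is taken from `collections.abc` on Python 3.9+ instead of the deprecated `typing` alias. A condensed, illustrative preamble showing both changes together:

```python
# Illustrative module preamble mirroring the regenerated imports.
import sys

from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError
from azure.core.rest import HttpRequest, HttpResponse  # replaces transport.HttpResponse

if sys.version_info >= (3, 9):
    from collections.abc import MutableMapping  # preferred on modern Python
else:
    from typing import MutableMapping  # type: ignore  # deprecated alias, 3.8 fallback

# error_map now carries an explicit annotation instead of an inferred dict type:
error_map: MutableMapping = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
}
```
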
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_asc_usages_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_asc_usages_operations.py
index 54182d5d1334..f69f667e6d0b 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_asc_usages_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_asc_usages_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
@@ -19,16 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -40,7 +42,7 @@ def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> Ht
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -98,7 +100,7 @@ def list(self, location: str, **kwargs: Any) -> Iterable["_models.ResourceUsage"
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ResourceUsagesListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -116,7 +118,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -132,7 +133,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_auto_export_jobs_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_auto_export_jobs_operations.py
new file mode 100644
index 000000000000..cba3c1997286
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_auto_export_jobs_operations.py
@@ -0,0 +1,1019 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
+import urllib.parse
+
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models as _models
+from .._serialization import Serializer
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_delete_request(
+ resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url(
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
+ ),
+ "amlFilesystemName": _SERIALIZER.url(
+ "aml_filesystem_name",
+ aml_filesystem_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "autoExportJobName": _SERIALIZER.url(
+ "auto_export_job_name",
+ auto_export_job_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_get_request(
+ resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url(
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
+ ),
+ "amlFilesystemName": _SERIALIZER.url(
+ "aml_filesystem_name",
+ aml_filesystem_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "autoExportJobName": _SERIALIZER.url(
+ "auto_export_job_name",
+ auto_export_job_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_create_or_update_request(
+ resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url(
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "amlFilesystemName": _SERIALIZER.url(
+ "aml_filesystem_name",
+ aml_filesystem_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ "autoExportJobName": _SERIALIZER.url(
+ "auto_export_job_name",
+ auto_export_job_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_update_request(
+ resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName}",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url(
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "amlFilesystemName": _SERIALIZER.url(
+ "aml_filesystem_name",
+ aml_filesystem_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ "autoExportJobName": _SERIALIZER.url(
+ "auto_export_job_name",
+ auto_export_job_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_list_by_aml_filesystem_request(
+ resource_group_name: str, aml_filesystem_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop(
+ "template_url",
+ "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs",
+ ) # pylint: disable=line-too-long
+ path_format_arguments = {
+ "resourceGroupName": _SERIALIZER.url(
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
+ ),
+ "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
+ "amlFilesystemName": _SERIALIZER.url(
+ "aml_filesystem_name",
+ aml_filesystem_name,
+ "str",
+ max_length=80,
+ min_length=2,
+ pattern=r"^[0-9a-zA-Z][-0-9a-zA-Z_]{0,78}[0-9a-zA-Z]$",
+ ),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+ # Construct headers
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class AutoExportJobsOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.mgmt.storagecache.StorageCacheManagementClient`'s
+ :attr:`auto_export_jobs` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs):
+ input_args = list(args)
+ self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ def _delete_initial(
+ self, resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["Azure-AsyncOperation"] = self._deserialize(
+ "str", response.headers.get("Azure-AsyncOperation")
+ )
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace
+ def begin_delete(
+ self, resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, **kwargs: Any
+ ) -> LROPoller[None]:
+ """Schedules an auto export job for deletion.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ api_version=api_version,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
+ if cls:
+ return cls(pipeline_response, None, {}) # type: ignore
+
+ if polling is True:
+ polling_method: PollingMethod = cast(
+ PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[None].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
+
+ @distributed_trace
+ def get(
+ self, resource_group_name: str, aml_filesystem_name: str, auto_export_job_name: str, **kwargs: Any
+ ) -> _models.AutoExportJob:
+ """Returns an auto export job.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :return: AutoExportJob or the result of cls(response)
+ :rtype: ~azure.mgmt.storagecache.models.AutoExportJob
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.AutoExportJob] = kwargs.pop("cls", None)
+
+ _request = build_get_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize("AutoExportJob", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+
+ return deserialized # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: Union[_models.AutoExportJob, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(auto_export_job, (IOBase, bytes)):
+ _content = auto_export_job
+ else:
+ _json = self._serialize.body(auto_export_job, "AutoExportJob")
+
+ _request = build_create_or_update_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 201:
+ response_headers["azure-async-operation"] = self._deserialize(
+ "str", response.headers.get("azure-async-operation")
+ )
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: _models.AutoExportJob,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.AutoExportJob]:
+ """Create or update an auto export job.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Required.
+ :type auto_export_job: ~azure.mgmt.storagecache.models.AutoExportJob
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.AutoExportJob]:
+ """Create or update an auto export job.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Required.
+ :type auto_export_job: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: Union[_models.AutoExportJob, IO[bytes]],
+ **kwargs: Any
+ ) -> LROPoller[_models.AutoExportJob]:
+ """Create or update an auto export job.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Is either an AutoExportJob type or an IO[bytes] type. Required.
+ :type auto_export_job: ~azure.mgmt.storagecache.models.AutoExportJob or IO[bytes]
+ :return: An instance of LROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.AutoExportJob] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ auto_export_job=auto_export_job,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("AutoExportJob", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: PollingMethod = cast(
+ PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[_models.AutoExportJob].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[_models.AutoExportJob](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ def _update_initial(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: Union[_models.AutoExportJobUpdate, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(auto_export_job, (IOBase, bytes)):
+ _content = auto_export_job
+ else:
+ _json = self._serialize.body(auto_export_job, "AutoExportJobUpdate")
+
+ _request = build_update_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ content_type=content_type,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+ response_headers["azure-async-operation"] = self._deserialize(
+ "str", response.headers.get("azure-async-operation")
+ )
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: _models.AutoExportJobUpdate,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.AutoExportJob]:
+ """Update an auto export job instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Required.
+ :type auto_export_job: ~azure.mgmt.storagecache.models.AutoExportJobUpdate
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def begin_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: IO[bytes],
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> LROPoller[_models.AutoExportJob]:
+ """Update an auto export job instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Required.
+ :type auto_export_job: IO[bytes]
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: An instance of LROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_update(
+ self,
+ resource_group_name: str,
+ aml_filesystem_name: str,
+ auto_export_job_name: str,
+ auto_export_job: Union[_models.AutoExportJobUpdate, IO[bytes]],
+ **kwargs: Any
+ ) -> LROPoller[_models.AutoExportJob]:
+ """Update an auto export job instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :param auto_export_job_name: Name for the auto export job. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type auto_export_job_name: str
+ :param auto_export_job: Object containing the user-selectable properties of the auto export
+ job. If read-only properties are included, they must match the existing values of those
+ properties. Is either an AutoExportJobUpdate type or an IO[bytes] type. Required.
+ :type auto_export_job: ~azure.mgmt.storagecache.models.AutoExportJobUpdate or IO[bytes]
+ :return: An instance of LROPoller that returns either AutoExportJob or the result of
+ cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[_models.AutoExportJob] = kwargs.pop("cls", None)
+ polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
+ lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+ cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+ if cont_token is None:
+ raw_result = self._update_initial(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ auto_export_job_name=auto_export_job_name,
+ auto_export_job=auto_export_job,
+ api_version=api_version,
+ content_type=content_type,
+ cls=lambda x, y, z: x,
+ headers=_headers,
+ params=_params,
+ **kwargs
+ )
+ raw_result.http_response.read() # type: ignore
+ kwargs.pop("error_map", None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize("AutoExportJob", pipeline_response.http_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {}) # type: ignore
+ return deserialized
+
+ if polling is True:
+ polling_method: PollingMethod = cast(
+ PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs)
+ )
+ elif polling is False:
+ polling_method = cast(PollingMethod, NoPolling())
+ else:
+ polling_method = polling
+ if cont_token:
+ return LROPoller[_models.AutoExportJob].from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output,
+ )
+ return LROPoller[_models.AutoExportJob](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
+
+ @distributed_trace
+ def list_by_aml_filesystem(
+ self, resource_group_name: str, aml_filesystem_name: str, **kwargs: Any
+ ) -> Iterable["_models.AutoExportJob"]:
+ """Returns all the auto export jobs the user has access to under an AML File System.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param aml_filesystem_name: Name for the AML file system. Allows alphanumerics, underscores,
+ and hyphens. Start and end with alphanumeric. Required.
+ :type aml_filesystem_name: str
+ :return: An iterator-like instance of either AutoExportJob or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storagecache.models.AutoExportJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[_models.AutoExportJobsListResult] = kwargs.pop("cls", None)
+
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ def prepare_request(next_link=None):
+ if not next_link:
+
+ _request = build_list_by_aml_filesystem_request(
+ resource_group_name=resource_group_name,
+ aml_filesystem_name=aml_filesystem_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ else:
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize("AutoExportJobsListResult", pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem) # type: ignore
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ _request = prepare_request(next_link)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(get_next, extract_data)
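
With the new file in place, the operation group is reachable as `client.auto_export_jobs` on `StorageCacheManagementClient`, per the class docstring above. A hedged usage sketch follows; the resource names and subscription ID are placeholders, and `DefaultAzureCredential` is just one possible credential:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient

client = StorageCacheManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
)

# Enumerate auto export jobs under an AML file system (server-side paged).
for job in client.auto_export_jobs.list_by_aml_filesystem(
    resource_group_name="my-rg", aml_filesystem_name="my-amlfs"
):
    print(job.name)

# Fetch one job, then schedule it for deletion; begin_delete returns an LROPoller.
job = client.auto_export_jobs.get("my-rg", "my-amlfs", "my-export-job")
client.auto_export_jobs.begin_delete("my-rg", "my-amlfs", "my-export-job").result()
```

Creation goes through `begin_create_or_update`, which per the overloads accepts either an `AutoExportJob` model or a raw `IO[bytes]` body.
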
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_caches_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_caches_operations.py
index 51a635d8c0fb..f7971e18e255 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_caches_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_caches_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +7,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,8 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -43,7 +48,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -67,7 +72,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -97,7 +102,7 @@ def build_delete_request(resource_group_name: str, cache_name: str, subscription
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -128,7 +133,7 @@ def build_get_request(resource_group_name: str, cache_name: str, subscription_id
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -161,7 +166,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -195,7 +200,7 @@ def build_update_request(resource_group_name: str, cache_name: str, subscription
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -231,7 +236,7 @@ def build_debug_info_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -262,7 +267,7 @@ def build_flush_request(resource_group_name: str, cache_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -293,7 +298,7 @@ def build_start_request(resource_group_name: str, cache_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -324,7 +329,7 @@ def build_stop_request(resource_group_name: str, cache_name: str, subscription_i
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -357,7 +362,7 @@ def build_start_priming_job_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -393,7 +398,7 @@ def build_stop_priming_job_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -429,7 +434,7 @@ def build_pause_priming_job_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -465,7 +470,7 @@ def build_resume_priming_job_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -501,7 +506,7 @@ def build_upgrade_firmware_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -534,7 +539,7 @@ def build_space_allocation_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -597,7 +602,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.Cache"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.CachesListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -614,7 +619,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -630,7 +634,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -676,7 +679,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.CachesListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -694,7 +697,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -710,7 +712,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -739,10 +740,8 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- def _delete_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _delete_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -754,7 +753,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_delete_request(
resource_group_name=resource_group_name,
@@ -764,10 +763,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -775,6 +774,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -785,8 +788,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -811,7 +818,7 @@ def begin_delete(self, resource_group_name: str, cache_name: str, **kwargs: Any)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -820,6 +827,7 @@ def begin_delete(self, resource_group_name: str, cache_name: str, **kwargs: Any)
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -855,7 +863,7 @@ def get(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> _mode
:rtype: ~azure.mgmt.storagecache.models.Cache
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -877,7 +885,6 @@ def get(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> _mode
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -891,7 +898,7 @@ def get(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> _mode
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Cache", pipeline_response)
+ deserialized = self._deserialize("Cache", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -900,8 +907,8 @@ def get(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> _mode
def _create_or_update_initial(
self, resource_group_name: str, cache_name: str, cache: Union[_models.Cache, IO[bytes]], **kwargs: Any
- ) -> Optional[_models.Cache]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -914,7 +921,7 @@ def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.Cache]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -935,10 +942,10 @@ def _create_or_update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -946,15 +953,14 @@ def _create_or_update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("Cache", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("Cache", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -1060,10 +1066,11 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Cache", pipeline_response)
+ deserialized = self._deserialize("Cache", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -1091,8 +1098,8 @@ def _update_initial(
cache_name: str,
cache: Optional[Union[_models.Cache, IO[bytes]]] = None,
**kwargs: Any
- ) -> Optional[_models.Cache]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1105,7 +1112,7 @@ def _update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.Cache]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -1129,10 +1136,10 @@ def _update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1140,20 +1147,22 @@ def _update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("Cache", pipeline_response)
-
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["Azure-AsyncOperation"] = self._deserialize(
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -1264,10 +1273,11 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Cache", pipeline_response)
+ deserialized = self._deserialize("Cache", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -1291,10 +1301,8 @@ def get_long_running_output(pipeline_response):
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
- def _debug_info_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _debug_info_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1306,7 +1314,7 @@ def _debug_info_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_debug_info_request(
resource_group_name=resource_group_name,
@@ -1316,10 +1324,10 @@ def _debug_info_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1327,6 +1335,10 @@ def _debug_info_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1337,8 +1349,12 @@ def _debug_info_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_debug_info(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -1363,7 +1379,7 @@ def begin_debug_info(self, resource_group_name: str, cache_name: str, **kwargs:
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._debug_info_initial( # type: ignore
+ raw_result = self._debug_info_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -1372,6 +1388,7 @@ def begin_debug_info(self, resource_group_name: str, cache_name: str, **kwargs:
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1395,10 +1412,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _flush_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _flush_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1410,7 +1425,7 @@ def _flush_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_flush_request(
resource_group_name=resource_group_name,
@@ -1420,10 +1435,10 @@ def _flush_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1431,6 +1446,10 @@ def _flush_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1441,8 +1460,12 @@ def _flush_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_flush(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -1468,7 +1491,7 @@ def begin_flush(self, resource_group_name: str, cache_name: str, **kwargs: Any)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._flush_initial( # type: ignore
+ raw_result = self._flush_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -1477,6 +1500,7 @@ def begin_flush(self, resource_group_name: str, cache_name: str, **kwargs: Any)
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1500,10 +1524,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _start_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _start_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1515,7 +1537,7 @@ def _start_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_start_request(
resource_group_name=resource_group_name,
@@ -1525,10 +1547,10 @@ def _start_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1536,6 +1558,10 @@ def _start_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1546,8 +1572,12 @@ def _start_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_start(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -1572,7 +1602,7 @@ def begin_start(self, resource_group_name: str, cache_name: str, **kwargs: Any)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._start_initial( # type: ignore
+ raw_result = self._start_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -1581,6 +1611,7 @@ def begin_start(self, resource_group_name: str, cache_name: str, **kwargs: Any)
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1604,10 +1635,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _stop_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _stop_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1619,7 +1648,7 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_stop_request(
resource_group_name=resource_group_name,
@@ -1629,10 +1658,10 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1640,6 +1669,10 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1650,8 +1683,12 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_stop(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -1676,7 +1713,7 @@ def begin_stop(self, resource_group_name: str, cache_name: str, **kwargs: Any) -
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._stop_initial( # type: ignore
+ raw_result = self._stop_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -1685,6 +1722,7 @@ def begin_stop(self, resource_group_name: str, cache_name: str, **kwargs: Any) -
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1708,14 +1746,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _start_priming_job_initial( # pylint: disable=inconsistent-return-statements
+ def _start_priming_job_initial(
self,
resource_group_name: str,
cache_name: str,
primingjob: Optional[Union[_models.PrimingJob, IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1728,7 +1766,7 @@ def _start_priming_job_initial( # pylint: disable=inconsistent-return-statement
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -1752,10 +1790,10 @@ def _start_priming_job_initial( # pylint: disable=inconsistent-return-statement
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1763,6 +1801,10 @@ def _start_priming_job_initial( # pylint: disable=inconsistent-return-statement
response = pipeline_response.http_response
if response.status_code not in [202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1772,8 +1814,12 @@ def _start_priming_job_initial( # pylint: disable=inconsistent-return-statement
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
def begin_start_priming_job(
@@ -1864,7 +1910,7 @@ def begin_start_priming_job(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._start_priming_job_initial( # type: ignore
+ raw_result = self._start_priming_job_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
primingjob=primingjob,
@@ -1875,6 +1921,7 @@ def begin_start_priming_job(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -1898,14 +1945,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _stop_priming_job_initial( # pylint: disable=inconsistent-return-statements
+ def _stop_priming_job_initial(
self,
resource_group_name: str,
cache_name: str,
priming_job_id: Optional[Union[_models.PrimingJobIdParameter, IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1918,7 +1965,7 @@ def _stop_priming_job_initial( # pylint: disable=inconsistent-return-statements
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -1942,10 +1989,10 @@ def _stop_priming_job_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -1953,6 +2000,10 @@ def _stop_priming_job_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -1963,8 +2014,12 @@ def _stop_priming_job_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
def begin_stop_priming_job(
@@ -2055,7 +2110,7 @@ def begin_stop_priming_job(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._stop_priming_job_initial( # type: ignore
+ raw_result = self._stop_priming_job_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
priming_job_id=priming_job_id,
@@ -2066,6 +2121,7 @@ def begin_stop_priming_job(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -2089,14 +2145,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _pause_priming_job_initial( # pylint: disable=inconsistent-return-statements
+ def _pause_priming_job_initial(
self,
resource_group_name: str,
cache_name: str,
priming_job_id: Optional[Union[_models.PrimingJobIdParameter, IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2109,7 +2165,7 @@ def _pause_priming_job_initial( # pylint: disable=inconsistent-return-statement
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -2133,10 +2189,10 @@ def _pause_priming_job_initial( # pylint: disable=inconsistent-return-statement
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -2144,6 +2200,10 @@ def _pause_priming_job_initial( # pylint: disable=inconsistent-return-statement
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -2154,8 +2214,12 @@ def _pause_priming_job_initial( # pylint: disable=inconsistent-return-statement
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
def begin_pause_priming_job(
@@ -2246,7 +2310,7 @@ def begin_pause_priming_job(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._pause_priming_job_initial( # type: ignore
+ raw_result = self._pause_priming_job_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
priming_job_id=priming_job_id,
@@ -2257,6 +2321,7 @@ def begin_pause_priming_job(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -2280,14 +2345,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _resume_priming_job_initial( # pylint: disable=inconsistent-return-statements
+ def _resume_priming_job_initial(
self,
resource_group_name: str,
cache_name: str,
priming_job_id: Optional[Union[_models.PrimingJobIdParameter, IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2300,7 +2365,7 @@ def _resume_priming_job_initial( # pylint: disable=inconsistent-return-statemen
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -2324,10 +2389,10 @@ def _resume_priming_job_initial( # pylint: disable=inconsistent-return-statemen
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -2335,6 +2400,10 @@ def _resume_priming_job_initial( # pylint: disable=inconsistent-return-statemen
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -2345,8 +2414,12 @@ def _resume_priming_job_initial( # pylint: disable=inconsistent-return-statemen
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
def begin_resume_priming_job(
@@ -2437,7 +2510,7 @@ def begin_resume_priming_job(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._resume_priming_job_initial( # type: ignore
+ raw_result = self._resume_priming_job_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
priming_job_id=priming_job_id,
@@ -2448,6 +2521,7 @@ def begin_resume_priming_job(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -2471,10 +2545,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, cache_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _upgrade_firmware_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2486,7 +2558,7 @@ def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_upgrade_firmware_request(
resource_group_name=resource_group_name,
@@ -2496,10 +2568,10 @@ def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -2507,6 +2579,10 @@ def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [201, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -2517,8 +2593,12 @@ def _upgrade_firmware_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_upgrade_firmware(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> LROPoller[None]:
@@ -2544,7 +2624,7 @@ def begin_upgrade_firmware(self, resource_group_name: str, cache_name: str, **kw
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._upgrade_firmware_initial( # type: ignore
+ raw_result = self._upgrade_firmware_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
api_version=api_version,
@@ -2553,6 +2633,7 @@ def begin_upgrade_firmware(self, resource_group_name: str, cache_name: str, **kw
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -2576,14 +2657,14 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _space_allocation_initial( # pylint: disable=inconsistent-return-statements
+ def _space_allocation_initial(
self,
resource_group_name: str,
cache_name: str,
space_allocation: Optional[Union[List[_models.StorageTargetSpaceAllocation], IO[bytes]]] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -2596,7 +2677,7 @@ def _space_allocation_initial( # pylint: disable=inconsistent-return-statements
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -2620,10 +2701,10 @@ def _space_allocation_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -2631,6 +2712,10 @@ def _space_allocation_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -2640,8 +2725,12 @@ def _space_allocation_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@overload
def begin_space_allocation(
@@ -2735,7 +2824,7 @@ def begin_space_allocation(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._space_allocation_initial( # type: ignore
+ raw_result = self._space_allocation_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
space_allocation=space_allocation,
@@ -2746,6 +2835,7 @@ def begin_space_allocation(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
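Before moving to the next file: every _*_initial hunk in this caches file follows the same regenerated shape. The request is issued with stream=True; on failure the body is drained with response.read() (tolerating StreamConsumedError/StreamClosedError) before map_error runs; on success the method returns response.stream_download(...) as an Iterator[bytes] instead of a deserialized model or None; and each begin_* wrapper eagerly calls raw_result.http_response.read() before building the poller, with deserialization moved into get_long_running_output against pipeline_response.http_response. The vendored _convert_request shim disappears for the same reason the HttpResponse import moves to azure.core.rest: current azure-core pipelines consume rest-layer requests and responses directly. Below is a stdlib-only sketch of the control flow, using a deliberately fake response type; nothing in it is the SDK's actual API.

# _FakeResponse is a hypothetical stand-in for azure.core.rest.HttpResponse.
from typing import Iterator, List, Optional

class _FakeResponse:
    def __init__(self, chunks: List[bytes], status_code: int = 202) -> None:
        self.status_code = status_code
        self._chunks = chunks
        self._body: Optional[bytes] = None

    def iter_bytes(self) -> Iterator[bytes]:
        # Streaming accessor: yields chunks without buffering the whole body.
        yield from self._chunks

    def read(self) -> bytes:
        # Eager accessor: buffers the body (the real transport also closes
        # the socket) -- the role of raw_result.http_response.read() above.
        if self._body is None:
            self._body = b"".join(self._chunks)
        return self._body

def _delete_initial(response: _FakeResponse) -> Iterator[bytes]:
    if response.status_code not in (200, 202, 204):
        response.read()  # load the error body so the failure can be reported
        raise RuntimeError(f"unexpected status {response.status_code}")
    return response.iter_bytes()  # raw stream; no deserialization here

def begin_delete(response: _FakeResponse) -> None:
    raw_result = _delete_initial(response)
    response.read()  # drain before polling, as the begin_* wrappers now do
    assert b"".join(raw_result) == b"{}"

begin_delete(_FakeResponse([b"{", b"}"], status_code=202))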
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_import_jobs_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_import_jobs_operations.py
index 502f65d22596..aca9f715f738 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_import_jobs_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_import_jobs_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +7,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,8 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -45,7 +50,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -93,7 +98,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -141,7 +146,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -192,7 +197,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -243,7 +248,7 @@ def build_list_by_aml_filesystem_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -296,10 +301,10 @@ def __init__(self, *args, **kwargs):
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self, resource_group_name: str, aml_filesystem_name: str, import_job_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -311,7 +316,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_delete_request(
resource_group_name=resource_group_name,
@@ -322,10 +327,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -333,6 +338,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -344,8 +353,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -375,7 +388,7 @@ def begin_delete(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
aml_filesystem_name=aml_filesystem_name,
import_job_name=import_job_name,
@@ -385,6 +398,7 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -427,7 +441,7 @@ def get(
:rtype: ~azure.mgmt.storagecache.models.ImportJob
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -450,7 +464,6 @@ def get(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -465,7 +478,7 @@ def get(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("ImportJob", pipeline_response)
+ deserialized = self._deserialize("ImportJob", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -479,8 +492,8 @@ def _create_or_update_initial(
import_job_name: str,
import_job: Union[_models.ImportJob, IO[bytes]],
**kwargs: Any
- ) -> _models.ImportJob:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -493,7 +506,7 @@ def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.ImportJob] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -515,10 +528,10 @@ def _create_or_update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -526,20 +539,21 @@ def _create_or_update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("ImportJob", pipeline_response)
-
if response.status_code == 201:
response_headers["azure-async-operation"] = self._deserialize(
"str", response.headers.get("azure-async-operation")
)
- deserialized = self._deserialize("ImportJob", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -557,8 +571,7 @@ def begin_create_or_update(
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.ImportJob]:
- """Create or update an import job. Import jobs are automatically deleted 72 hours after
- completion.
+ """Create or update an import job.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
@@ -592,8 +605,7 @@ def begin_create_or_update(
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.ImportJob]:
- """Create or update an import job. Import jobs are automatically deleted 72 hours after
- completion.
+ """Create or update an import job.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
@@ -625,8 +637,7 @@ def begin_create_or_update(
import_job: Union[_models.ImportJob, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.ImportJob]:
- """Create or update an import job. Import jobs are automatically deleted 72 hours after
- completion.
+ """Create or update an import job.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
@@ -667,10 +678,11 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("ImportJob", pipeline_response)
+ deserialized = self._deserialize("ImportJob", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -701,8 +713,8 @@ def _update_initial(
import_job_name: str,
import_job: Union[_models.ImportJobUpdate, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.ImportJob]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -715,7 +727,7 @@ def _update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.ImportJob]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -737,10 +749,10 @@ def _update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -748,21 +760,23 @@ def _update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
response_headers = {}
- if response.status_code == 200:
- deserialized = self._deserialize("ImportJob", pipeline_response)
-
if response.status_code == 202:
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
response_headers["azure-async-operation"] = self._deserialize(
"str", response.headers.get("azure-async-operation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -886,10 +900,11 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("ImportJob", pipeline_response)
+ deserialized = self._deserialize("ImportJob", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -935,7 +950,7 @@ def list_by_aml_filesystem(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ImportJobsListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -954,7 +969,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -970,7 +984,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
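Across all of the _*_initial helpers in this diff the same pattern repeats: the initial long-running-operation request is now issued with stream=True, the helper returns an Iterator[bytes] produced by stream_download, error paths drain the socket before raising, and the public begin_* wrapper calls raw_result.http_response.read() before handing off to the poller. A minimal standalone sketch of that control flow (the pipeline and request arguments stand in for the client internals; this is an illustration, not the generated code itself):

    from typing import Iterator

    from azure.core.exceptions import (
        HttpResponseError,
        StreamClosedError,
        StreamConsumedError,
    )


    def initial_call(pipeline, request, **kwargs) -> Iterator[bytes]:
        # Sketch of the regenerated pattern: stream the initial LRO response
        # instead of buffering it eagerly.
        decompress = kwargs.pop("decompress", True)
        pipeline_response = pipeline.run(request, stream=True, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in (200, 201, 202):
            try:
                response.read()  # load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            raise HttpResponseError(response=response)
        # Download is deferred; begin_* reads the body just before polling starts.
        return response.stream_download(pipeline, decompress=decompress)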
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_operations.py
index 703946e08c55..a6f927b3f482 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
@@ -19,16 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -40,7 +42,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -88,7 +90,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.ApiOperation"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ApiOperationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -104,7 +106,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -120,7 +121,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
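The MutableMapping shim that arrives alongside the _convert_request removal exists so error_map can be annotated loosely: the map mixes exception classes with per-status callables (see the 400 lambda removed further down in this diff), and .update() is called on it with caller-supplied overrides. A small self-contained illustration of why the wide annotation is needed, assuming nothing beyond azure-core:

    import sys

    if sys.version_info >= (3, 9):
        from collections.abc import MutableMapping
    else:
        from typing import MutableMapping  # type: ignore

    from azure.core.exceptions import HttpResponseError, ResourceNotFoundError

    # A narrowly inferred type like Dict[int, Type[ResourceNotFoundError]]
    # would reject the callable below; MutableMapping keeps both value shapes
    # assignable under strict type checking.
    error_map: MutableMapping = {404: ResourceNotFoundError}
    error_map[400] = lambda response: HttpResponseError(response=response)
    error_map.update({})  # caller-supplied overrides merge the same way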
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_skus_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_skus_operations.py
index 4fd50c68c962..33e0d52315ad 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_skus_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_skus_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
@@ -19,16 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -40,7 +42,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -93,7 +95,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.ResourceSku"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.ResourceSkusResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -110,7 +112,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -126,7 +127,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
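Every request builder in this diff bumps its default api-version from 2024-03-01 to 2024-07-01, but the kwargs.pop("api_version", ...) lines show that callers can still pin a version per call. A hedged usage sketch in the style of the generated samples (the skus attribute follows the operations-class naming; the subscription id is a placeholder):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storagecache import StorageCacheManagementClient

    client = StorageCacheManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )

    # The default moved to 2024-07-01; an explicit api_version keyword still wins.
    for sku in client.skus.list(api_version="2024-07-01"):
        print(sku)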
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_cache_management_client_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_cache_management_client_operations.py
index 3f5fee840f6d..525bf3df150b 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_cache_management_client_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_cache_management_client_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,6 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
+import sys
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
@@ -18,16 +18,19 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+from .._vendor import StorageCacheManagementClientMixinABC
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -39,7 +42,7 @@ def build_check_aml_fs_subnets_request(subscription_id: str, **kwargs: Any) -> H
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -70,7 +73,7 @@ def build_get_required_aml_fs_subnets_size_request( # pylint: disable=name-too-
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -98,8 +101,9 @@ def build_get_required_aml_fs_subnets_size_request( # pylint: disable=name-too-
class StorageCacheManagementClientOperationsMixin( # pylint: disable=name-too-long
StorageCacheManagementClientMixinABC
):
+
@overload
- def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
+ def check_aml_fs_subnets(
self,
aml_filesystem_subnet_info: Optional[_models.AmlFilesystemSubnetInfo] = None,
*,
@@ -120,7 +124,7 @@ def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
"""
@overload
- def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
+ def check_aml_fs_subnets(
self,
aml_filesystem_subnet_info: Optional[IO[bytes]] = None,
*,
@@ -156,16 +160,11 @@ def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
- 400: lambda response: HttpResponseError(
- response=response,
- model=self._deserialize(_models.AmlFilesystemCheckSubnetError, response),
- error_format=ARMErrorFormat,
- ),
}
error_map.update(kwargs.pop("error_map", {}) or {})
@@ -196,7 +195,6 @@ def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -208,7 +206,10 @@ def check_aml_fs_subnets( # pylint: disable=inconsistent-return-statements
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+ error = None
+ if response.status_code == 400:
+ error = self._deserialize.failsafe_deserialize(_models.AmlFilesystemCheckSubnetError, pipeline_response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {}) # type: ignore
@@ -275,7 +276,7 @@ def get_required_aml_fs_subnets_size(
:rtype: ~azure.mgmt.storagecache.models.RequiredAmlFilesystemSubnetsSize
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -312,7 +313,6 @@ def get_required_aml_fs_subnets_size(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -326,7 +326,7 @@ def get_required_aml_fs_subnets_size(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("RequiredAmlFilesystemSubnetsSize", pipeline_response)
+ deserialized = self._deserialize("RequiredAmlFilesystemSubnetsSize", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
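The subnet check no longer pre-registers a 400 handler in error_map; instead a 400 response is failsafe-deserialized into AmlFilesystemCheckSubnetError and attached to the raised HttpResponseError as its model. A sketch of what that means for a caller (the no-argument call matches the overloads' Optional[...] = None defaults; the subscription id is a placeholder):

    from azure.core.exceptions import HttpResponseError
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storagecache import StorageCacheManagementClient

    client = StorageCacheManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )

    try:
        client.check_aml_fs_subnets()  # body is optional per the generated overloads
    except HttpResponseError as err:
        # On a 400 the error model is the failsafe-deserialized
        # AmlFilesystemCheckSubnetError; it is None if deserialization failed
        # or for other status codes.
        print(err.model)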
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_target_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_target_operations.py
index be631936f6ed..c7163d1b5b95 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_target_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_target_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,7 +5,8 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Optional, TypeVar, Union, cast
+import sys
+from typing import Any, Callable, Dict, Iterator, Optional, TypeVar, Union, cast
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -14,12 +14,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -27,8 +28,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -42,7 +46,7 @@ def build_flush_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -78,7 +82,7 @@ def build_suspend_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -114,7 +118,7 @@ def build_resume_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -150,7 +154,7 @@ def build_invalidate_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -199,10 +203,10 @@ def __init__(self, *args, **kwargs):
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
- def _flush_initial( # pylint: disable=inconsistent-return-statements
+ def _flush_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -214,7 +218,7 @@ def _flush_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_flush_request(
resource_group_name=resource_group_name,
@@ -225,10 +229,10 @@ def _flush_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -236,6 +240,10 @@ def _flush_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -246,8 +254,12 @@ def _flush_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_flush(
@@ -278,7 +290,7 @@ def begin_flush(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._flush_initial( # type: ignore
+ raw_result = self._flush_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -288,6 +300,7 @@ def begin_flush(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -311,10 +324,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _suspend_initial( # pylint: disable=inconsistent-return-statements
+ def _suspend_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -326,7 +339,7 @@ def _suspend_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_suspend_request(
resource_group_name=resource_group_name,
@@ -337,10 +350,10 @@ def _suspend_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -348,6 +361,10 @@ def _suspend_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -358,8 +375,12 @@ def _suspend_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_suspend(
@@ -388,7 +409,7 @@ def begin_suspend(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._suspend_initial( # type: ignore
+ raw_result = self._suspend_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -398,6 +419,7 @@ def begin_suspend(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -421,10 +443,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _resume_initial( # pylint: disable=inconsistent-return-statements
+ def _resume_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -436,7 +458,7 @@ def _resume_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_resume_request(
resource_group_name=resource_group_name,
@@ -447,10 +469,10 @@ def _resume_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -458,6 +480,10 @@ def _resume_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -468,8 +494,12 @@ def _resume_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_resume(
@@ -498,7 +528,7 @@ def begin_resume(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._resume_initial( # type: ignore
+ raw_result = self._resume_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -508,6 +538,7 @@ def begin_resume(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -531,10 +562,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
)
return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
- def _invalidate_initial( # pylint: disable=inconsistent-return-statements
+ def _invalidate_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -546,7 +577,7 @@ def _invalidate_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_invalidate_request(
resource_group_name=resource_group_name,
@@ -557,10 +588,10 @@ def _invalidate_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -568,6 +599,10 @@ def _invalidate_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -578,8 +613,12 @@ def _invalidate_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_invalidate(
@@ -609,7 +648,7 @@ def begin_invalidate(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._invalidate_initial( # type: ignore
+ raw_result = self._invalidate_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -619,6 +658,7 @@ def begin_invalidate(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
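All four storage-target maintenance operations (flush, suspend, resume, invalidate) take the same shape after this change: the _initial helper streams, begin_* reads the body, and the poller still resolves to None. A hedged usage sketch (the storage_target attribute and the resource names are assumptions for illustration):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storagecache import StorageCacheManagementClient

    client = StorageCacheManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )

    # LROPoller[None]: .result() blocks until the flush completes and returns nothing.
    client.storage_target.begin_flush(
        resource_group_name="scgroup",      # hypothetical resource names
        cache_name="sc1",
        storage_target_name="st1",
    ).result()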
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_targets_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_targets_operations.py
index e93513d343e0..fa40d9049beb 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_targets_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_storage_targets_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +7,8 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -16,13 +17,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -30,8 +32,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -45,7 +50,7 @@ def build_dns_refresh_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -81,7 +86,7 @@ def build_list_by_cache_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -120,7 +125,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -158,7 +163,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -194,7 +199,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -233,7 +238,7 @@ def build_restore_defaults_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -282,10 +287,10 @@ def __init__(self, *args, **kwargs):
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
- def _dns_refresh_initial( # pylint: disable=inconsistent-return-statements
+ def _dns_refresh_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -297,7 +302,7 @@ def _dns_refresh_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_dns_refresh_request(
resource_group_name=resource_group_name,
@@ -308,10 +313,10 @@ def _dns_refresh_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -319,6 +324,10 @@ def _dns_refresh_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -329,8 +338,12 @@ def _dns_refresh_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_dns_refresh(
@@ -359,7 +372,7 @@ def begin_dns_refresh(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._dns_refresh_initial( # type: ignore
+ raw_result = self._dns_refresh_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -369,6 +382,7 @@ def begin_dns_refresh(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -414,7 +428,7 @@ def list_by_cache(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.StorageTargetsResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -433,7 +447,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -449,7 +462,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
@@ -478,15 +490,15 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self,
resource_group_name: str,
cache_name: str,
storage_target_name: str,
force: Optional[str] = None,
**kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -498,7 +510,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_delete_request(
resource_group_name=resource_group_name,
@@ -510,10 +522,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -521,6 +533,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -531,8 +547,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -573,7 +593,7 @@ def begin_delete(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -584,6 +604,7 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
@@ -623,7 +644,7 @@ def get(
:rtype: ~azure.mgmt.storagecache.models.StorageTarget
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -646,7 +667,6 @@ def get(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_stream = False
@@ -660,7 +680,7 @@ def get(
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = self._deserialize("StorageTarget", pipeline_response)
+ deserialized = self._deserialize("StorageTarget", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -674,8 +694,8 @@ def _create_or_update_initial(
storage_target_name: str,
storagetarget: Union[_models.StorageTarget, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.StorageTarget]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -688,7 +708,7 @@ def _create_or_update_initial(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.StorageTarget]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -710,10 +730,10 @@ def _create_or_update_initial(
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -721,15 +741,14 @@ def _create_or_update_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("StorageTarget", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("StorageTarget", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
@@ -855,10 +874,11 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("StorageTarget", pipeline_response)
+ deserialized = self._deserialize("StorageTarget", pipeline_response.http_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
@@ -880,10 +900,10 @@ def get_long_running_output(pipeline_response):
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
- def _restore_defaults_initial( # pylint: disable=inconsistent-return-statements
+ def _restore_defaults_initial(
self, resource_group_name: str, cache_name: str, storage_target_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -895,7 +915,7 @@ def _restore_defaults_initial( # pylint: disable=inconsistent-return-statements
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
_request = build_restore_defaults_request(
resource_group_name=resource_group_name,
@@ -906,10 +926,10 @@ def _restore_defaults_initial( # pylint: disable=inconsistent-return-statements
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)
@@ -917,6 +937,10 @@ def _restore_defaults_initial( # pylint: disable=inconsistent-return-statements
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
@@ -927,8 +951,12 @@ def _restore_defaults_initial( # pylint: disable=inconsistent-return-statements
"str", response.headers.get("Azure-AsyncOperation")
)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, response_headers) # type: ignore
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
@distributed_trace
def begin_restore_defaults(
@@ -957,7 +985,7 @@ def begin_restore_defaults(
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._restore_defaults_initial( # type: ignore
+ raw_result = self._restore_defaults_initial(
resource_group_name=resource_group_name,
cache_name=cache_name,
storage_target_name=storage_target_name,
@@ -967,6 +995,7 @@ def begin_restore_defaults(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
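The _delete_initial signature above shows the optional force parameter surviving the regeneration as a plain string. A hedged sketch of the corresponding public call (the storage_targets attribute and resource names are assumptions for illustration):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storagecache import StorageCacheManagementClient

    client = StorageCacheManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )

    client.storage_targets.begin_delete(
        resource_group_name="scgroup",   # hypothetical resource names
        cache_name="sc1",
        storage_target_name="st1",
        force="true",                    # optional string flag per the signature
    ).result()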
diff --git a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_usage_models_operations.py b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_usage_models_operations.py
index e452ca47fac3..4274ad7a4c28 100644
--- a/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_usage_models_operations.py
+++ b/sdk/storage/azure-mgmt-storagecache/azure/mgmt/storagecache/operations/_usage_models_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
@@ -19,16 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import StorageCacheManagementClientMixinABC, _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -40,7 +42,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -93,7 +95,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.UsageModel"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.UsageModelsResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -110,7 +112,6 @@ def prepare_request(next_link=None):
headers=_headers,
params=_params,
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
else:
@@ -126,7 +127,6 @@ def prepare_request(next_link=None):
_request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
- _request = _convert_request(_request)
_request.url = self._client.format_url(_request.url)
_request.method = "GET"
return _request
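The usage-models operations keep the standard paging shape: list returns an ItemPaged that transparently follows nextLink, now without the _convert_request hop. A minimal usage sketch in the same style as the generated samples that follow (the subscription id is a placeholder):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storagecache import StorageCacheManagementClient

    client = StorageCacheManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )

    for usage_model in client.usage_models.list():
        print(usage_model)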
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_archive.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_archive.py
index 9cfbf5eac162..41cc6214b467 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_archive.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_archive.py
@@ -36,6 +36,6 @@ def main():
)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Archive.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/amlFilesystems_Archive.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_cancel_archive.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_cancel_archive.py
index 0d60cd825612..214ed47d638a 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_cancel_archive.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_cancel_archive.py
@@ -36,6 +36,6 @@ def main():
)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_CancelArchive.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/amlFilesystems_CancelArchive.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_create_or_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_create_or_update.py
index 2d414a0676ed..1609f413481e 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_create_or_update.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_create_or_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -77,6 +75,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_CreateOrUpdate.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/amlFilesystems_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_delete.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_delete.py
index 573271d4d968..16c2d90fa5f6 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_delete.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_delete.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Delete.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/amlFilesystems_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_get.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_get.py
index 33c5a693732d..27b253451b35 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_get.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_get.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Get.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/amlFilesystems_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_list.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_list.py
index fd633840235a..8993fc2c9af3 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_list.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_list.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_List.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/amlFilesystems_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_list_by_resource_group.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_list_by_resource_group.py
index dd9ec341097c..f6293c052abc 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_list_by_resource_group.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_list_by_resource_group.py
@@ -37,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_ListByResourceGroup.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/amlFilesystems_ListByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_update.py
index 129c8a91597e..51dcf0cee8e7 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_update.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/aml_filesystems_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -59,6 +57,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Update.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/amlFilesystems_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/asc_operations_get.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/asc_operations_get.py
index 25d52ef9f1a3..14d56de778d2 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/asc_operations_get.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/asc_operations_get.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/AscOperations_Get.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/AscOperations_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/asc_resource_usages_get.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/asc_resource_usages_get.py
index c460c213c155..f77a39c9b5b7 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/asc_resource_usages_get.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/asc_resource_usages_get.py
@@ -37,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/AscResourceUsages_Get.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/AscResourceUsages_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_create_or_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_create_or_update.py
new file mode 100644
index 000000000000..70501537c5c5
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_create_or_update.py
@@ -0,0 +1,48 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-storagecache
+# USAGE
+ python auto_export_jobs_create_or_update.py
+
+ Before running the sample, set the values of the client ID, tenant ID, and client
+ secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information on how to obtain these values, see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = StorageCacheManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-0000-0000-0000-000000000000",
+ )
+
+ response = client.auto_export_jobs.begin_create_or_update(
+ resource_group_name="scgroup",
+ aml_filesystem_name="fs1",
+ auto_export_job_name="job1",
+ auto_export_job={
+ "location": "eastus",
+ "properties": {"autoExportPrefixes": ["/"]},
+ "tags": {"Dept": "ContosoAds"},
+ },
+ ).result()
+ print(response)
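+ # Assumption-level sketch, not part of the generated sample: begin_create_or_update
+ # returns an azure.core.polling.LROPoller, so instead of blocking on .result() the
+ # job can be polled incrementally:
+ #   poller = client.auto_export_jobs.begin_create_or_update(...)
+ #   while not poller.done():
+ #       poller.wait(30)  # block up to 30 seconds between status checks
+ #   response = poller.result()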
+
+
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/autoExportJobs_CreateOrUpdate.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_delete.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_delete.py
new file mode 100644
index 000000000000..5e75dc02a0f5
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_delete.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-storagecache
+# USAGE
+ python auto_export_jobs_delete.py
+
+ Before running the sample, set the values of the client ID, tenant ID, and client
+ secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information on how to obtain these values, see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = StorageCacheManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-0000-0000-0000-000000000000",
+ )
+
+ client.auto_export_jobs.begin_delete(
+ resource_group_name="scgroup",
+ aml_filesystem_name="fs1",
+ auto_export_job_name="job1",
+ ).result()
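+ # Hedged variant (assumption, not in the generated sample): tolerate a job that was
+ # already removed by catching the service's not-found error:
+ #   from azure.core.exceptions import ResourceNotFoundError
+ #   try:
+ #       client.auto_export_jobs.begin_delete(
+ #           resource_group_name="scgroup",
+ #           aml_filesystem_name="fs1",
+ #           auto_export_job_name="job1",
+ #       ).result()
+ #   except ResourceNotFoundError:
+ #       pass  # the job was already gone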
+
+
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/autoExportJobs_Delete.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_get.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_get.py
new file mode 100644
index 000000000000..1181dc1992f3
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_get.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-storagecache
+# USAGE
+ python auto_export_jobs_get.py
+
+ Before running the sample, set the values of the client ID, tenant ID, and client
+ secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information on how to obtain these values, see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = StorageCacheManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-0000-0000-0000-000000000000",
+ )
+
+ response = client.auto_export_jobs.get(
+ resource_group_name="scgroup",
+ aml_filesystem_name="fs1",
+ auto_export_job_name="job1",
+ )
+ print(response)
+
+
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/autoExportJobs_Get.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_list_by_aml_filesystem.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_list_by_aml_filesystem.py
new file mode 100644
index 000000000000..f77520b6746f
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_list_by_aml_filesystem.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-storagecache
+# USAGE
+ python auto_export_jobs_list_by_aml_filesystem.py
+
+ Before running the sample, set the values of the client ID, tenant ID, and client
+ secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information on how to obtain these values, see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = StorageCacheManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-0000-0000-0000-000000000000",
+ )
+
+ response = client.auto_export_jobs.list_by_aml_filesystem(
+ resource_group_name="scgroup",
+ aml_filesystem_name="fs1",
+ )
+ for item in response:
+ print(item)
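+ # The call returns an azure.core.paging.ItemPaged iterator; as a sketch, the results
+ # can also be consumed one service page at a time:
+ #   for page in response.by_page():
+ #       for job in page:
+ #           print(job)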
+
+
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/autoExportJobs_ListByAmlFilesystem.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_update.py
new file mode 100644
index 000000000000..844f49c027ff
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/auto_export_jobs_update.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-storagecache
+# USAGE
+ python auto_export_jobs_update.py
+
+ Before running the sample, set the values of the client ID, tenant ID, and client
+ secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more information on how to obtain these values, see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = StorageCacheManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="00000000-0000-0000-0000-000000000000",
+ )
+
+ response = client.auto_export_jobs.begin_update(
+ resource_group_name="scgroup",
+ aml_filesystem_name="fs1",
+ auto_export_job_name="job1",
+ auto_export_job={"tags": {"Dept": "ContosoAds"}},
+ ).result()
+ print(response)
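+ # Note: begin_update issues a PATCH, so only the fields supplied in auto_export_job
+ # (here, tags) are changed; properties omitted from the body keep their current values.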
+
+
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/autoExportJobs_Update.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_create_or_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_create_or_update.py
index 60c16b3caffd..cf0db4240ad9 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_create_or_update.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_create_or_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -99,6 +97,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_CreateOrUpdate.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_create_or_update_ldap_only.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_create_or_update_ldap_only.py
index f2351e60bd3f..655b8f295b00 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_create_or_update_ldap_only.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_create_or_update_ldap_only.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -85,6 +83,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_CreateOrUpdate_ldap_only.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_CreateOrUpdate_ldap_only.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_debug_info.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_debug_info.py
index 7a6cf79b55b5..ce5dab13a1e3 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_debug_info.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_debug_info.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_DebugInfo.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_DebugInfo.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_delete.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_delete.py
index fbe5406f59fa..e47b86ed4ec5 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_delete.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_delete.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Delete.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_flush.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_flush.py
index 0f141a167641..2fe885b429ed 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_flush.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_flush.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Flush.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_Flush.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_get.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_get.py
index 5e1cf35f912a..b1a8c7c883f4 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_get.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_get.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Get.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_list.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_list.py
index eabc80b7ca81..9eccdfd33e02 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_list.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_list.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_List.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_list_by_resource_group.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_list_by_resource_group.py
index f40f68416cbf..62488936e91a 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_list_by_resource_group.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_list_by_resource_group.py
@@ -37,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_ListByResourceGroup.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_ListByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_start.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_start.py
index 20450892e0c9..82dc8a35d2fb 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_start.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_start.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Start.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_Start.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_stop.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_stop.py
index 2114f9eecb27..37be2b9c032e 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_stop.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_stop.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Stop.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_Stop.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_update.py
index fcc47940aebd..09606a77b81d 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_update.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_update.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Update.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_update_ldap_only.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_update_ldap_only.py
index 52b2744ce8fb..2ac5c6a9d4f3 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_update_ldap_only.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_update_ldap_only.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Update_ldap_only.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_Update_ldap_only.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_upgrade_firmware.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_upgrade_firmware.py
index 5ab364792c37..faf7e8e5b876 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_upgrade_firmware.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/caches_upgrade_firmware.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_UpgradeFirmware.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Caches_UpgradeFirmware.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/check_aml_fs_subnets.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/check_aml_fs_subnets.py
index 431e9319b075..e727561aeb59 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/check_aml_fs_subnets.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/check_aml_fs_subnets.py
@@ -33,6 +33,6 @@ def main():
client.check_aml_fs_subnets()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/checkAmlFSSubnets.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/checkAmlFSSubnets.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/get_required_aml_fs_subnets_size.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/get_required_aml_fs_subnets_size.py
index a9c458327303..5c01c4a40800 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/get_required_aml_fs_subnets_size.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/get_required_aml_fs_subnets_size.py
@@ -34,6 +34,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/getRequiredAmlFSSubnetsSize.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/getRequiredAmlFSSubnetsSize.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_job_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_job_update.py
index cc8ef3ca6055..fd8f18ce18d7 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_job_update.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_job_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -41,6 +39,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJob_Update.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/importJob_Update.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_create_or_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_create_or_update.py
index 5a6f07455190..27f5e8ed2784 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_create_or_update.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_create_or_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -45,6 +43,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_CreateOrUpdate.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/importJobs_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_delete.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_delete.py
index 5e2fb58a6134..5bf618549a1a 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_delete.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_delete.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_Delete.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/importJobs_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_get.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_get.py
index ec9636f486a6..3dae7e2100ce 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_get.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_get.py
@@ -38,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_Get.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/importJobs_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_list_by_aml_filesystem.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_list_by_aml_filesystem.py
index 73c018c4c20a..8791778dee61 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_list_by_aml_filesystem.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/import_jobs_list_by_aml_filesystem.py
@@ -38,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_ListByAmlFilesystem.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/importJobs_ListByAmlFilesystem.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/operations_list.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/operations_list.py
index e75e7311f454..513dd26427b3 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/operations_list.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/operations_list.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Operations_List.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Operations_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/pause_priming_job.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/pause_priming_job.py
index be80dff482b7..f5ee2ab098d6 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/pause_priming_job.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/pause_priming_job.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/PausePrimingJob.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/PausePrimingJob.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/resume_priming_job.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/resume_priming_job.py
index 4c24930a8486..d11207d17a22 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/resume_priming_job.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/resume_priming_job.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/ResumePrimingJob.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/ResumePrimingJob.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/skus_list.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/skus_list.py
index 646f230be7e6..9647ee844e9e 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/skus_list.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/skus_list.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Skus_List.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/Skus_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/space_allocation_post.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/space_allocation_post.py
index 445318b11a21..7ba8d9d41b0b 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/space_allocation_post.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/space_allocation_post.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/SpaceAllocation_Post.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/SpaceAllocation_Post.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/start_priming_job.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/start_priming_job.py
index f28299501c0e..5a9ac7df78d3 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/start_priming_job.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/start_priming_job.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StartPrimingJob.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StartPrimingJob.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/stop_priming_job.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/stop_priming_job.py
index bf93f795f492..c3a3cf7236c9 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/stop_priming_job.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/stop_priming_job.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StopPrimingJob.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StopPrimingJob.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update.py
index b4e41bf66c80..257b1a41d82b 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -60,6 +58,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_CreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update_blob_nfs.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update_blob_nfs.py
index 090b8e7e5d2a..c4a2922a6482 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update_blob_nfs.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update_blob_nfs.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -52,6 +50,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate_BlobNfs.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_CreateOrUpdate_BlobNfs.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update_no_junctions.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update_no_junctions.py
index 8d222a501677..8c18e143acbf 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update_no_junctions.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_create_or_update_no_junctions.py
@@ -6,8 +6,6 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, IO, Union
-
from azure.identity import DefaultAzureCredential
from azure.mgmt.storagecache import StorageCacheManagementClient
@@ -46,6 +44,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate_NoJunctions.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_CreateOrUpdate_NoJunctions.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_delete.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_delete.py
index 74f6ce11f958..095467c5d184 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_delete.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_delete.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Delete.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_Delete.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_dns_refresh.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_dns_refresh.py
index d7b59d7d3ec4..e526716b67a8 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_dns_refresh.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_dns_refresh.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_DnsRefresh.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_DnsRefresh.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_flush.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_flush.py
index 7210af789959..59e9fffe88cc 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_flush.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_flush.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Flush.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_Flush.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_get.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_get.py
index 767689cb9de0..8c0e17913df4 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_get.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_get.py
@@ -38,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Get.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_Get.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_invalidate.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_invalidate.py
index 4cc11eb3c2e2..0adceb7b7bec 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_invalidate.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_invalidate.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Invalidate.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_Invalidate.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_list_by_cache.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_list_by_cache.py
index 69a05296096a..435659e829d4 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_list_by_cache.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_list_by_cache.py
@@ -38,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_ListByCache.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_ListByCache.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_restore_defaults.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_restore_defaults.py
index c9fb34577c9c..9ee9352661f6 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_restore_defaults.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_restore_defaults.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_RestoreDefaults.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_RestoreDefaults.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_resume.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_resume.py
index 4370cb1f7761..00eab6e79817 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_resume.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_resume.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Resume.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_Resume.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_suspend.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_suspend.py
index 5cb81460b589..8a9ad04a485d 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_suspend.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/storage_targets_suspend.py
@@ -37,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Suspend.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/StorageTargets_Suspend.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_samples/usage_models_list.py b/sdk/storage/azure-mgmt-storagecache/generated_samples/usage_models_list.py
index cdd3e018ec77..1811d9be96fc 100644
--- a/sdk/storage/azure-mgmt-storagecache/generated_samples/usage_models_list.py
+++ b/sdk/storage/azure-mgmt-storagecache/generated_samples/usage_models_list.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/UsageModels_List.json
+# x-ms-original-file: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-07-01/examples/UsageModels_List.json
if __name__ == "__main__":
main()
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/conftest.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/conftest.py
new file mode 100644
index 000000000000..0662fa2eab35
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/conftest.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+ test_proxy,
+ add_general_regex_sanitizer,
+ add_body_key_sanitizer,
+ add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+ storagecachemanagement_subscription_id = os.environ.get(
+ "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+ )
+ storagecachemanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+ storagecachemanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+ storagecachemanagement_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(
+ regex=storagecachemanagement_subscription_id, value="00000000-0000-0000-0000-000000000000"
+ )
+ add_general_regex_sanitizer(regex=storagecachemanagement_tenant_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=storagecachemanagement_client_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(
+ regex=storagecachemanagement_client_secret, value="00000000-0000-0000-0000-000000000000"
+ )
+
+ add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+ add_header_regex_sanitizer(key="Cookie", value="cookie;")
+ add_body_key_sanitizer(json_path="$..access_token", value="access_token")
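+
+ # Further values captured in recordings (for example resource group or filesystem
+ # names) could be scrubbed the same way; a hypothetical addition:
+ #   add_general_regex_sanitizer(regex="scgroup", value="sanitized-rg")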
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management.py
new file mode 100644
index 000000000000..ada618dbcc41
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestStorageCacheManagement(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_check_aml_fs_subnets(self, resource_group):
+ response = self.client.check_aml_fs_subnets(
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_get_required_aml_fs_subnets_size(self, resource_group):
+ response = self.client.get_required_aml_fs_subnets_size(
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
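+ # for example, a minimal smoke check (sketch): the operation returns the required
+ # subnet size, so asserting a non-empty response is reasonable:
+ #   assert response is not None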
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_aml_filesystems_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_aml_filesystems_operations.py
new file mode 100644
index 000000000000..c2e7e7cd15f7
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_aml_filesystems_operations.py
@@ -0,0 +1,192 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestStorageCacheManagementAmlFilesystemsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
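+ # create_mgmt_client supplies credentials that work in both live and playback
+ # runs, so recorded tests do not require real secrets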
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_aml_filesystems_list(self, resource_group):
+ response = self.client.aml_filesystems.list(
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_aml_filesystems_list_by_resource_group(self, resource_group):
+ response = self.client.aml_filesystems.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_aml_filesystems_begin_delete(self, resource_group):
+ response = self.client.aml_filesystems.begin_delete(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_aml_filesystems_get(self, resource_group):
+ response = self.client.aml_filesystems.get(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_aml_filesystems_begin_create_or_update(self, resource_group):
+ response = self.client.aml_filesystems.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ aml_filesystem={
+ "location": "str",
+ "clientInfo": {
+ "containerStorageInterface": {
+ "persistentVolume": "str",
+ "persistentVolumeClaim": "str",
+ "storageClass": "str",
+ },
+ "lustreVersion": "str",
+ "mgsAddress": "str",
+ "mountCommand": "str",
+ },
+ "encryptionSettings": {"keyEncryptionKey": {"keyUrl": "str", "sourceVault": {"id": "str"}}},
+ "filesystemSubnet": "str",
+ "health": {"state": "str", "statusCode": "str", "statusDescription": "str"},
+ "hsm": {
+ "archiveStatus": [
+ {
+ "filesystemPath": "str",
+ "status": {
+ "errorCode": "str",
+ "errorMessage": "str",
+ "lastCompletionTime": "2020-02-20 00:00:00",
+ "lastStartedTime": "2020-02-20 00:00:00",
+ "percentComplete": 0,
+ "state": "str",
+ },
+ }
+ ],
+ "settings": {
+ "container": "str",
+ "loggingContainer": "str",
+ "importPrefix": "/",
+ "importPrefixesInitial": ["str"],
+ },
+ },
+ "id": "str",
+ "identity": {
+ "principalId": "str",
+ "tenantId": "str",
+ "type": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "maintenanceWindow": {"dayOfWeek": "str", "timeOfDayUTC": "str"},
+ "name": "str",
+ "provisioningState": "str",
+ "rootSquashSettings": {
+ "mode": "str",
+ "noSquashNidLists": "str",
+ "squashGID": 0,
+ "squashUID": 0,
+ "status": "str",
+ },
+ "sku": {"name": "str"},
+ "storageCapacityTiB": 0.0,
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "throughputProvisionedMBps": 0,
+ "type": "str",
+ "zones": ["str"],
+ },
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_aml_filesystems_begin_update(self, resource_group):
+ response = self.client.aml_filesystems.begin_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ aml_filesystem={
+ "encryptionSettings": {"keyEncryptionKey": {"keyUrl": "str", "sourceVault": {"id": "str"}}},
+ "maintenanceWindow": {"dayOfWeek": "str", "timeOfDayUTC": "str"},
+ "rootSquashSettings": {
+ "mode": "str",
+ "noSquashNidLists": "str",
+ "squashGID": 0,
+ "squashUID": 0,
+ "status": "str",
+ },
+ "tags": {"str": "str"},
+ },
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_aml_filesystems_archive(self, resource_group):
+ response = self.client.aml_filesystems.archive(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_aml_filesystems_cancel_archive(self, resource_group):
+ response = self.client.aml_filesystems.cancel_archive(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_aml_filesystems_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_aml_filesystems_operations_async.py
new file mode 100644
index 000000000000..5b7dec792314
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_aml_filesystems_operations_async.py
@@ -0,0 +1,199 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestStorageCacheManagementAmlFilesystemsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_aml_filesystems_list(self, resource_group):
+ response = self.client.aml_filesystems.list(
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_aml_filesystems_list_by_resource_group(self, resource_group):
+ response = self.client.aml_filesystems.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_aml_filesystems_begin_delete(self, resource_group):
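+ # async long-running operations take two awaits: one for the 'begin_*' call,
+ # which returns an AsyncLROPoller, and one for '.result()' to finish polling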
+ response = await (
+ await self.client.aml_filesystems.begin_delete(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_aml_filesystems_get(self, resource_group):
+ response = await self.client.aml_filesystems.get(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_aml_filesystems_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.aml_filesystems.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ aml_filesystem={
+ "location": "str",
+ "clientInfo": {
+ "containerStorageInterface": {
+ "persistentVolume": "str",
+ "persistentVolumeClaim": "str",
+ "storageClass": "str",
+ },
+ "lustreVersion": "str",
+ "mgsAddress": "str",
+ "mountCommand": "str",
+ },
+ "encryptionSettings": {"keyEncryptionKey": {"keyUrl": "str", "sourceVault": {"id": "str"}}},
+ "filesystemSubnet": "str",
+ "health": {"state": "str", "statusCode": "str", "statusDescription": "str"},
+ "hsm": {
+ "archiveStatus": [
+ {
+ "filesystemPath": "str",
+ "status": {
+ "errorCode": "str",
+ "errorMessage": "str",
+ "lastCompletionTime": "2020-02-20 00:00:00",
+ "lastStartedTime": "2020-02-20 00:00:00",
+ "percentComplete": 0,
+ "state": "str",
+ },
+ }
+ ],
+ "settings": {
+ "container": "str",
+ "loggingContainer": "str",
+ "importPrefix": "/",
+ "importPrefixesInitial": ["str"],
+ },
+ },
+ "id": "str",
+ "identity": {
+ "principalId": "str",
+ "tenantId": "str",
+ "type": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "maintenanceWindow": {"dayOfWeek": "str", "timeOfDayUTC": "str"},
+ "name": "str",
+ "provisioningState": "str",
+ "rootSquashSettings": {
+ "mode": "str",
+ "noSquashNidLists": "str",
+ "squashGID": 0,
+ "squashUID": 0,
+ "status": "str",
+ },
+ "sku": {"name": "str"},
+ "storageCapacityTiB": 0.0,
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "throughputProvisionedMBps": 0,
+ "type": "str",
+ "zones": ["str"],
+ },
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_aml_filesystems_begin_update(self, resource_group):
+ response = await (
+ await self.client.aml_filesystems.begin_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ aml_filesystem={
+ "encryptionSettings": {"keyEncryptionKey": {"keyUrl": "str", "sourceVault": {"id": "str"}}},
+ "maintenanceWindow": {"dayOfWeek": "str", "timeOfDayUTC": "str"},
+ "rootSquashSettings": {
+ "mode": "str",
+ "noSquashNidLists": "str",
+ "squashGID": 0,
+ "squashUID": 0,
+ "status": "str",
+ },
+ "tags": {"str": "str"},
+ },
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_aml_filesystems_archive(self, resource_group):
+ response = await self.client.aml_filesystems.archive(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_aml_filesystems_cancel_archive(self, resource_group):
+ response = await self.client.aml_filesystems.cancel_archive(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_operations_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_operations_operations.py
new file mode 100644
index 000000000000..c5d5ab007e85
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_operations_operations.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementAscOperationsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_asc_operations_get(self, resource_group):
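+ # 'asc_operations.get' looks up the status of an in-flight asynchronous
+ # cache operation by region and operation id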
+ response = self.client.asc_operations.get(
+ location="str",
+ operation_id="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_operations_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_operations_operations_async.py
new file mode 100644
index 000000000000..46c77a9042e4
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_operations_operations_async.py
@@ -0,0 +1,32 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementAscOperationsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_asc_operations_get(self, resource_group):
+ response = await self.client.asc_operations.get(
+ location="str",
+ operation_id="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_usages_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_usages_operations.py
new file mode 100644
index 000000000000..ad065b4804d5
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_usages_operations.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementAscUsagesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_asc_usages_list(self, resource_group):
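+ # 'asc_usages.list' reports the quantity used and the quota limit for
+ # cache resources in the given region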
+ response = self.client.asc_usages.list(
+ location="str",
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_usages_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_usages_operations_async.py
new file mode 100644
index 000000000000..211b83db1964
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_asc_usages_operations_async.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementAscUsagesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_asc_usages_list(self, resource_group):
+ response = self.client.asc_usages.list(
+ location="str",
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_async.py
new file mode 100644
index 000000000000..802c4e7ab833
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_async.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_check_aml_fs_subnets(self, resource_group):
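+ # 'check_aml_fs_subnets' validates that a subnet can host an AML file system;
+ # a failed check surfaces as an HTTP error rather than a response payload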
+ response = await self.client.check_aml_fs_subnets(
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_get_required_aml_fs_subnets_size(self, resource_group):
+ response = await self.client.get_required_aml_fs_subnets_size(
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_auto_export_jobs_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_auto_export_jobs_operations.py
new file mode 100644
index 000000000000..86b01dadacc7
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_auto_export_jobs_operations.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementAutoExportJobsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_auto_export_jobs_begin_delete(self, resource_group):
+ response = self.client.auto_export_jobs.begin_delete(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ auto_export_job_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_auto_export_jobs_get(self, resource_group):
+ response = self.client.auto_export_jobs.get(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ auto_export_job_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_auto_export_jobs_begin_create_or_update(self, resource_group):
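+ # note: the iteration counters, timestamps and 'provisioningState' in the
+ # payload below are read-only status fields; the service populates them itself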
+ response = self.client.auto_export_jobs.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ auto_export_job_name="str",
+ auto_export_job={
+ "location": "str",
+ "adminStatus": "Active",
+ "autoExportPrefixes": ["str"],
+ "currentIterationFilesDiscovered": 0,
+ "currentIterationFilesExported": 0,
+ "currentIterationFilesFailed": 0,
+ "currentIterationMiBDiscovered": 0,
+ "currentIterationMiBExported": 0,
+ "exportIterationCount": 0,
+ "id": "str",
+ "lastCompletionTimeUTC": "2020-02-20 00:00:00",
+ "lastStartedTimeUTC": "2020-02-20 00:00:00",
+ "lastSuccessfulIterationCompletionTimeUTC": "2020-02-20 00:00:00",
+ "name": "str",
+ "provisioningState": "str",
+ "state": "str",
+ "statusCode": "str",
+ "statusMessage": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "totalFilesExported": 0,
+ "totalFilesFailed": 0,
+ "totalMiBExported": 0,
+ "type": "str",
+ },
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_auto_export_jobs_begin_update(self, resource_group):
+ response = self.client.auto_export_jobs.begin_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ auto_export_job_name="str",
+ auto_export_job={"tags": {"str": "str"}},
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_auto_export_jobs_list_by_aml_filesystem(self, resource_group):
+ response = self.client.auto_export_jobs.list_by_aml_filesystem(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_auto_export_jobs_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_auto_export_jobs_operations_async.py
new file mode 100644
index 000000000000..b6fae7540df4
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_auto_export_jobs_operations_async.py
@@ -0,0 +1,124 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementAutoExportJobsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_auto_export_jobs_begin_delete(self, resource_group):
+ response = await (
+ await self.client.auto_export_jobs.begin_delete(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ auto_export_job_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_auto_export_jobs_get(self, resource_group):
+ response = await self.client.auto_export_jobs.get(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ auto_export_job_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_auto_export_jobs_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.auto_export_jobs.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ auto_export_job_name="str",
+ auto_export_job={
+ "location": "str",
+ "adminStatus": "Active",
+ "autoExportPrefixes": ["str"],
+ "currentIterationFilesDiscovered": 0,
+ "currentIterationFilesExported": 0,
+ "currentIterationFilesFailed": 0,
+ "currentIterationMiBDiscovered": 0,
+ "currentIterationMiBExported": 0,
+ "exportIterationCount": 0,
+ "id": "str",
+ "lastCompletionTimeUTC": "2020-02-20 00:00:00",
+ "lastStartedTimeUTC": "2020-02-20 00:00:00",
+ "lastSuccessfulIterationCompletionTimeUTC": "2020-02-20 00:00:00",
+ "name": "str",
+ "provisioningState": "str",
+ "state": "str",
+ "statusCode": "str",
+ "statusMessage": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "totalFilesExported": 0,
+ "totalFilesFailed": 0,
+ "totalMiBExported": 0,
+ "type": "str",
+ },
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_auto_export_jobs_begin_update(self, resource_group):
+ response = await (
+ await self.client.auto_export_jobs.begin_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ auto_export_job_name="str",
+ auto_export_job={"tags": {"str": "str"}},
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_auto_export_jobs_list_by_aml_filesystem(self, resource_group):
+ response = self.client.auto_export_jobs.list_by_aml_filesystem(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_caches_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_caches_operations.py
new file mode 100644
index 000000000000..ded67774fcf9
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_caches_operations.py
@@ -0,0 +1,315 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementCachesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_list(self, resource_group):
+ response = self.client.caches.list(
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_list_by_resource_group(self, resource_group):
+ response = self.client.caches.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_delete(self, resource_group):
+ response = self.client.caches.begin_delete(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_get(self, resource_group):
+ response = self.client.caches.get(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_create_or_update(self, resource_group):
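+ # the payload below is a generated skeleton; read-only fields such as
+ # 'health', 'mountAddresses' and 'upgradeStatus' are reported by the service
+ # and do not need to be supplied on create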
+ response = self.client.caches.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ cache={
+ "cacheSizeGB": 0,
+ "directoryServicesSettings": {
+ "activeDirectory": {
+ "cacheNetBiosName": "str",
+ "domainName": "str",
+ "domainNetBiosName": "str",
+ "primaryDnsIpAddress": "str",
+ "credentials": {"username": "str", "password": "str"},
+ "domainJoined": "str",
+ "secondaryDnsIpAddress": "str",
+ },
+ "usernameDownload": {
+ "autoDownloadCertificate": False,
+ "caCertificateURI": "str",
+ "credentials": {"bindDn": "str", "bindPassword": "str"},
+ "encryptLdapConnection": False,
+ "extendedGroups": bool,
+ "groupFileURI": "str",
+ "ldapBaseDN": "str",
+ "ldapServer": "str",
+ "requireValidCertificate": False,
+ "userFileURI": "str",
+ "usernameDownloaded": "str",
+ "usernameSource": "None",
+ },
+ },
+ "encryptionSettings": {
+ "keyEncryptionKey": {"keyUrl": "str", "sourceVault": {"id": "str"}},
+ "rotationToLatestKeyVersionEnabled": bool,
+ },
+ "health": {
+ "conditions": [{"message": "str", "timestamp": "2020-02-20 00:00:00"}],
+ "state": "str",
+ "statusDescription": "str",
+ },
+ "id": "str",
+ "identity": {
+ "principalId": "str",
+ "tenantId": "str",
+ "type": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "location": "str",
+ "mountAddresses": ["str"],
+ "name": "str",
+ "networkSettings": {
+ "dnsSearchDomain": "str",
+ "dnsServers": ["str"],
+ "mtu": 1500,
+ "ntpServer": "time.windows.com",
+ "utilityAddresses": ["str"],
+ },
+ "primingJobs": [
+ {
+ "primingJobName": "str",
+ "primingManifestUrl": "str",
+ "primingJobDetails": "str",
+ "primingJobId": "str",
+ "primingJobPercentComplete": 0.0,
+ "primingJobState": "str",
+ "primingJobStatus": "str",
+ }
+ ],
+ "provisioningState": "str",
+ "securitySettings": {
+ "accessPolicies": [
+ {
+ "accessRules": [
+ {
+ "access": "str",
+ "scope": "str",
+ "anonymousGID": "str",
+ "anonymousUID": "str",
+ "filter": "str",
+ "rootSquash": bool,
+ "submountAccess": bool,
+ "suid": bool,
+ }
+ ],
+ "name": "str",
+ }
+ ]
+ },
+ "sku": {"name": "str"},
+ "spaceAllocation": [{"allocationPercentage": 0, "name": "str"}],
+ "subnet": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "upgradeSettings": {"scheduledTime": "2020-02-20 00:00:00", "upgradeScheduleEnabled": bool},
+ "upgradeStatus": {
+ "currentFirmwareVersion": "str",
+ "firmwareUpdateDeadline": "2020-02-20 00:00:00",
+ "firmwareUpdateStatus": "str",
+ "lastFirmwareUpdate": "2020-02-20 00:00:00",
+ "pendingFirmwareVersion": "str",
+ },
+ "zones": ["str"],
+ },
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_update(self, resource_group):
+ response = self.client.caches.begin_update(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_debug_info(self, resource_group):
+ response = self.client.caches.begin_debug_info(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_flush(self, resource_group):
+ response = self.client.caches.begin_flush(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_start(self, resource_group):
+ response = self.client.caches.begin_start(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_stop(self, resource_group):
+ response = self.client.caches.begin_stop(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_start_priming_job(self, resource_group):
+ response = self.client.caches.begin_start_priming_job(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_stop_priming_job(self, resource_group):
+ response = self.client.caches.begin_stop_priming_job(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_pause_priming_job(self, resource_group):
+ response = self.client.caches.begin_pause_priming_job(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_resume_priming_job(self, resource_group):
+ response = self.client.caches.begin_resume_priming_job(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_upgrade_firmware(self, resource_group):
+ response = self.client.caches.begin_upgrade_firmware(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_caches_begin_space_allocation(self, resource_group):
+ response = self.client.caches.begin_space_allocation(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_caches_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_caches_operations_async.py
new file mode 100644
index 000000000000..bc4dec734989
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_caches_operations_async.py
@@ -0,0 +1,342 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementCachesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_list(self, resource_group):
+ response = self.client.caches.list(
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_list_by_resource_group(self, resource_group):
+ response = self.client.caches.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_delete(self, resource_group):
+ response = await (
+ await self.client.caches.begin_delete(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_get(self, resource_group):
+ response = await self.client.caches.get(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.caches.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ cache={
+ "cacheSizeGB": 0,
+ "directoryServicesSettings": {
+ "activeDirectory": {
+ "cacheNetBiosName": "str",
+ "domainName": "str",
+ "domainNetBiosName": "str",
+ "primaryDnsIpAddress": "str",
+ "credentials": {"username": "str", "password": "str"},
+ "domainJoined": "str",
+ "secondaryDnsIpAddress": "str",
+ },
+ "usernameDownload": {
+ "autoDownloadCertificate": False,
+ "caCertificateURI": "str",
+ "credentials": {"bindDn": "str", "bindPassword": "str"},
+ "encryptLdapConnection": False,
+ "extendedGroups": bool,
+ "groupFileURI": "str",
+ "ldapBaseDN": "str",
+ "ldapServer": "str",
+ "requireValidCertificate": False,
+ "userFileURI": "str",
+ "usernameDownloaded": "str",
+ "usernameSource": "None",
+ },
+ },
+ "encryptionSettings": {
+ "keyEncryptionKey": {"keyUrl": "str", "sourceVault": {"id": "str"}},
+ "rotationToLatestKeyVersionEnabled": bool,
+ },
+ "health": {
+ "conditions": [{"message": "str", "timestamp": "2020-02-20 00:00:00"}],
+ "state": "str",
+ "statusDescription": "str",
+ },
+ "id": "str",
+ "identity": {
+ "principalId": "str",
+ "tenantId": "str",
+ "type": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "location": "str",
+ "mountAddresses": ["str"],
+ "name": "str",
+ "networkSettings": {
+ "dnsSearchDomain": "str",
+ "dnsServers": ["str"],
+ "mtu": 1500,
+ "ntpServer": "time.windows.com",
+ "utilityAddresses": ["str"],
+ },
+ "primingJobs": [
+ {
+ "primingJobName": "str",
+ "primingManifestUrl": "str",
+ "primingJobDetails": "str",
+ "primingJobId": "str",
+ "primingJobPercentComplete": 0.0,
+ "primingJobState": "str",
+ "primingJobStatus": "str",
+ }
+ ],
+ "provisioningState": "str",
+ "securitySettings": {
+ "accessPolicies": [
+ {
+ "accessRules": [
+ {
+ "access": "str",
+ "scope": "str",
+ "anonymousGID": "str",
+ "anonymousUID": "str",
+ "filter": "str",
+ "rootSquash": bool,
+ "submountAccess": bool,
+ "suid": bool,
+ }
+ ],
+ "name": "str",
+ }
+ ]
+ },
+ "sku": {"name": "str"},
+ "spaceAllocation": [{"allocationPercentage": 0, "name": "str"}],
+ "subnet": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "upgradeSettings": {"scheduledTime": "2020-02-20 00:00:00", "upgradeScheduleEnabled": bool},
+ "upgradeStatus": {
+ "currentFirmwareVersion": "str",
+ "firmwareUpdateDeadline": "2020-02-20 00:00:00",
+ "firmwareUpdateStatus": "str",
+ "lastFirmwareUpdate": "2020-02-20 00:00:00",
+ "pendingFirmwareVersion": "str",
+ },
+ "zones": ["str"],
+ },
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_update(self, resource_group):
+ response = await (
+ await self.client.caches.begin_update(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_debug_info(self, resource_group):
+ response = await (
+ await self.client.caches.begin_debug_info(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_flush(self, resource_group):
+ response = await (
+ await self.client.caches.begin_flush(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_start(self, resource_group):
+ response = await (
+ await self.client.caches.begin_start(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_stop(self, resource_group):
+ response = await (
+ await self.client.caches.begin_stop(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_start_priming_job(self, resource_group):
+ response = await (
+ await self.client.caches.begin_start_priming_job(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_stop_priming_job(self, resource_group):
+ response = await (
+ await self.client.caches.begin_stop_priming_job(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_pause_priming_job(self, resource_group):
+ response = await (
+ await self.client.caches.begin_pause_priming_job(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_resume_priming_job(self, resource_group):
+ response = await (
+ await self.client.caches.begin_resume_priming_job(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_upgrade_firmware(self, resource_group):
+ response = await (
+ await self.client.caches.begin_upgrade_firmware(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_caches_begin_space_allocation(self, resource_group):
+ response = await (
+ await self.client.caches.begin_space_allocation(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_import_jobs_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_import_jobs_operations.py
new file mode 100644
index 000000000000..d56090b80334
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_import_jobs_operations.py
@@ -0,0 +1,120 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementImportJobsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_import_jobs_begin_delete(self, resource_group):
+ response = self.client.import_jobs.begin_delete(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ import_job_name="str",
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_import_jobs_get(self, resource_group):
+ response = self.client.import_jobs.get(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ import_job_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_import_jobs_begin_create_or_update(self, resource_group):
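+ # the key inputs for an import job are 'importPrefixes',
+ # 'conflictResolutionMode' and 'maximumErrors'; the blob and file counters
+ # below are read-only status fields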
+ response = self.client.import_jobs.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ import_job_name="str",
+ import_job={
+ "location": "str",
+ "adminStatus": "Active",
+ "blobsImportedPerSecond": 0,
+ "blobsWalkedPerSecond": 0,
+ "conflictResolutionMode": "Fail",
+ "id": "str",
+ "importPrefixes": ["str"],
+ "importedDirectories": 0,
+ "importedFiles": 0,
+ "importedSymlinks": 0,
+ "lastCompletionTime": "2020-02-20 00:00:00",
+ "lastStartedTime": "2020-02-20 00:00:00",
+ "maximumErrors": 0,
+ "name": "str",
+ "preexistingDirectories": 0,
+ "preexistingFiles": 0,
+ "preexistingSymlinks": 0,
+ "provisioningState": "str",
+ "state": "str",
+ "statusMessage": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "totalBlobsImported": 0,
+ "totalBlobsWalked": 0,
+ "totalConflicts": 0,
+ "totalErrors": 0,
+ "type": "str",
+ },
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_import_jobs_begin_update(self, resource_group):
+ response = self.client.import_jobs.begin_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ import_job_name="str",
+ import_job={"tags": {"str": "str"}},
+ api_version="2024-07-01",
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_import_jobs_list_by_aml_filesystem(self, resource_group):
+ response = self.client.import_jobs.list_by_aml_filesystem(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_import_jobs_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_import_jobs_operations_async.py
new file mode 100644
index 000000000000..f60bcf9d8d6a
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_import_jobs_operations_async.py
@@ -0,0 +1,127 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementImportJobsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_import_jobs_begin_delete(self, resource_group):
+ response = await (
+ await self.client.import_jobs.begin_delete(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ import_job_name="str",
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_import_jobs_get(self, resource_group):
+ response = await self.client.import_jobs.get(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ import_job_name="str",
+ api_version="2024-07-01",
+ )
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_import_jobs_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.import_jobs.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ import_job_name="str",
+ import_job={
+ "location": "str",
+ "adminStatus": "Active",
+ "blobsImportedPerSecond": 0,
+ "blobsWalkedPerSecond": 0,
+ "conflictResolutionMode": "Fail",
+ "id": "str",
+ "importPrefixes": ["str"],
+ "importedDirectories": 0,
+ "importedFiles": 0,
+ "importedSymlinks": 0,
+ "lastCompletionTime": "2020-02-20 00:00:00",
+ "lastStartedTime": "2020-02-20 00:00:00",
+ "maximumErrors": 0,
+ "name": "str",
+ "preexistingDirectories": 0,
+ "preexistingFiles": 0,
+ "preexistingSymlinks": 0,
+ "provisioningState": "str",
+ "state": "str",
+ "statusMessage": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "totalBlobsImported": 0,
+ "totalBlobsWalked": 0,
+ "totalConflicts": 0,
+ "totalErrors": 0,
+ "type": "str",
+ },
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_import_jobs_begin_update(self, resource_group):
+ response = await (
+ await self.client.import_jobs.begin_update(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ import_job_name="str",
+ import_job={"tags": {"str": "str"}},
+ api_version="2024-07-01",
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_import_jobs_list_by_aml_filesystem(self, resource_group):
+ response = self.client.import_jobs.list_by_aml_filesystem(
+ resource_group_name=resource_group.name,
+ aml_filesystem_name="str",
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_operations.py
new file mode 100644
index 000000000000..2fad874e58c1
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_operations.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_operations_list(self, resource_group):
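+ # 'operations.list' enumerates the resource provider's available REST
+ # operations and does not depend on a subscription or resource group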
+ response = self.client.operations.list(
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+ # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_operations_async.py
new file mode 100644
index 000000000000..fb3b76dbf257
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_operations_async.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
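+        # note: 'is_async=True' asks the test helper for an async credential to
+        # match the aio client (an assumption based on devtools_testutils usage)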
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_operations_list(self, resource_group):
+ response = self.client.operations.list(
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_skus_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_skus_operations.py
new file mode 100644
index 000000000000..acc1d0078711
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_skus_operations.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementSkusOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_skus_list(self, resource_group):
+ response = self.client.skus.list(
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_skus_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_skus_operations_async.py
new file mode 100644
index 000000000000..b92802392a79
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_skus_operations_async.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementSkusOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_skus_list(self, resource_group):
+ response = self.client.skus.list(
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_target_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_target_operations.py
new file mode 100644
index 000000000000..cef15602e1fc
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_target_operations.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementStorageTargetOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_target_begin_flush(self, resource_group):
+ response = self.client.storage_target.begin_flush(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+        ).result()  # call '.result()' to poll until the service returns the final result
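+        # 'begin_flush' returns an LROPoller; '.result()' blocks until the
+        # operation reaches a terminal state, while '.status()' and '.done()'
+        # can be used to poll without blocking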
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_target_begin_suspend(self, resource_group):
+ response = self.client.storage_target.begin_suspend(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_target_begin_resume(self, resource_group):
+ response = self.client.storage_target.begin_resume(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_target_begin_invalidate(self, resource_group):
+ response = self.client.storage_target.begin_invalidate(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_target_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_target_operations_async.py
new file mode 100644
index 000000000000..db36a9f22d2f
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_target_operations_async.py
@@ -0,0 +1,80 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementStorageTargetOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_target_begin_flush(self, resource_group):
+ response = await (
+ await self.client.storage_target.begin_flush(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_target_begin_suspend(self, resource_group):
+ response = await (
+ await self.client.storage_target.begin_suspend(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_target_begin_resume(self, resource_group):
+ response = await (
+ await self.client.storage_target.begin_resume(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_target_begin_invalidate(self, resource_group):
+ response = await (
+ await self.client.storage_target.begin_invalidate(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_targets_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_targets_operations.py
new file mode 100644
index 000000000000..f362ca65a0e4
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_targets_operations.py
@@ -0,0 +1,121 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementStorageTargetsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_targets_begin_dns_refresh(self, resource_group):
+ response = self.client.storage_targets.begin_dns_refresh(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_targets_list_by_cache(self, resource_group):
+ response = self.client.storage_targets.list_by_cache(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_targets_begin_delete(self, resource_group):
+ response = self.client.storage_targets.begin_delete(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_targets_get(self, resource_group):
+ response = self.client.storage_targets.get(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+
+        # please add your own check logic here
+ # ...
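+        # a minimal hedged sketch of such a check (actual values depend on the
+        # recording; ARM GET echoes the requested resource name back):
+        # assert response.name == "str"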
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_targets_begin_create_or_update(self, resource_group):
+ response = self.client.storage_targets.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ storagetarget={
+ "allocationPercentage": 0,
+ "blobNfs": {"target": "str", "usageModel": "str", "verificationTimer": 0, "writeBackTimer": 0},
+ "clfs": {"target": "str"},
+ "id": "str",
+ "junctions": [
+ {"namespacePath": "str", "nfsAccessPolicy": "default", "nfsExport": "str", "targetPath": "str"}
+ ],
+ "location": "str",
+ "name": "str",
+ "nfs3": {"target": "str", "usageModel": "str", "verificationTimer": 0, "writeBackTimer": 0},
+ "provisioningState": "str",
+ "state": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "targetType": "str",
+ "type": "str",
+ "unknown": {"attributes": {"str": "str"}},
+ },
+ api_version="2024-07-01",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
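+        # note: the "str" / 0 values above are generated placeholders; a live
+        # run needs real values, e.g. an actual junction namespacePath and an
+        # NFS or Blob NFS target (illustrative requirements, not defaults)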
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_storage_targets_begin_restore_defaults(self, resource_group):
+ response = self.client.storage_targets.begin_restore_defaults(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_targets_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_targets_operations_async.py
new file mode 100644
index 000000000000..1e7dcad2ce21
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_storage_targets_operations_async.py
@@ -0,0 +1,130 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementStorageTargetsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_targets_begin_dns_refresh(self, resource_group):
+ response = await (
+ await self.client.storage_targets.begin_dns_refresh(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_targets_list_by_cache(self, resource_group):
+ response = self.client.storage_targets.list_by_cache(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_targets_begin_delete(self, resource_group):
+ response = await (
+ await self.client.storage_targets.begin_delete(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_targets_get(self, resource_group):
+ response = await self.client.storage_targets.get(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_targets_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.storage_targets.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ storagetarget={
+ "allocationPercentage": 0,
+ "blobNfs": {"target": "str", "usageModel": "str", "verificationTimer": 0, "writeBackTimer": 0},
+ "clfs": {"target": "str"},
+ "id": "str",
+ "junctions": [
+ {"namespacePath": "str", "nfsAccessPolicy": "default", "nfsExport": "str", "targetPath": "str"}
+ ],
+ "location": "str",
+ "name": "str",
+ "nfs3": {"target": "str", "usageModel": "str", "verificationTimer": 0, "writeBackTimer": 0},
+ "provisioningState": "str",
+ "state": "str",
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "targetType": "str",
+ "type": "str",
+ "unknown": {"attributes": {"str": "str"}},
+ },
+ api_version="2024-07-01",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_storage_targets_begin_restore_defaults(self, resource_group):
+ response = await (
+ await self.client.storage_targets.begin_restore_defaults(
+ resource_group_name=resource_group.name,
+ cache_name="str",
+ storage_target_name="str",
+ api_version="2024-07-01",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_usage_models_operations.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_usage_models_operations.py
new file mode 100644
index 000000000000..ac803e6c5330
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_usage_models_operations.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementUsageModelsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_usage_models_list(self, resource_group):
+ response = self.client.usage_models.list(
+ api_version="2024-07-01",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_usage_models_operations_async.py b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_usage_models_operations_async.py
new file mode 100644
index 000000000000..898d1d729629
--- /dev/null
+++ b/sdk/storage/azure-mgmt-storagecache/generated_tests/test_storage_cache_management_usage_models_operations_async.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.storagecache.aio import StorageCacheManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestStorageCacheManagementUsageModelsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(StorageCacheManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_usage_models_list(self, resource_group):
+ response = self.client.usage_models.list(
+ api_version="2024-07-01",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/storage/azure-mgmt-storagecache/setup.py b/sdk/storage/azure-mgmt-storagecache/setup.py
index 128852a29e4d..5e91f1a1c81c 100644
--- a/sdk/storage/azure-mgmt-storagecache/setup.py
+++ b/sdk/storage/azure-mgmt-storagecache/setup.py
@@ -75,6 +75,7 @@
},
install_requires=[
"isodate>=0.6.1",
+ "typing-extensions>=4.6.0",
"azure-common>=1.1",
"azure-mgmt-core>=1.3.2",
],