diff --git a/sdk/databox/azure-mgmt-databox/_meta.json b/sdk/databox/azure-mgmt-databox/_meta.json
index a5722d23dd62..6e1d4c1fbfd2 100644
--- a/sdk/databox/azure-mgmt-databox/_meta.json
+++ b/sdk/databox/azure-mgmt-databox/_meta.json
@@ -1,11 +1,11 @@
{
- "commit": "2776cb32cd6ca9ea953a13ae26c954b989e83367",
+ "commit": "01ae984566fbcacff0dbd666b35b935cfa23c107",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
"autorest": "3.10.2",
"use": [
- "@autorest/python@6.19.0",
+ "@autorest/python@6.27.4",
"@autorest/modelerfour@4.27.0"
],
- "autorest_command": "autorest specification/databox/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/azure-sdk-for-python/sdk --tag=package-2022-12 --use=@autorest/python@6.19.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
+ "autorest_command": "autorest specification/databox/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.27.4 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
"readme": "specification/databox/resource-manager/readme.md"
}
\ No newline at end of file
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/__init__.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/__init__.py
index 063260fb2814..80a1f3c3f55a 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/__init__.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._data_box_management_client import DataBoxManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._data_box_management_client import DataBoxManagementClient # type: ignore
from ._version import VERSION
__version__ = VERSION
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -21,6 +27,6 @@
__all__ = [
"DataBoxManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_configuration.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_configuration.py
index 38a799844eb0..089fb44c0ef3 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_configuration.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_configuration.py
@@ -14,11 +14,10 @@
from ._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class DataBoxManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class DataBoxManagementClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for DataBoxManagementClient.
Note that all parameters used to create this instance are saved as instance
@@ -28,13 +27,13 @@ class DataBoxManagementClientConfiguration: # pylint: disable=too-many-instance
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The Subscription Id. Required.
:type subscription_id: str
- :keyword api_version: Api Version. Default value is "2022-12-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2025-02-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
- api_version: str = kwargs.pop("api_version", "2022-12-01")
+ api_version: str = kwargs.pop("api_version", "2025-02-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_data_box_management_client.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_data_box_management_client.py
index d0d71f098ab2..15fb1aa16cd1 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_data_box_management_client.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_data_box_management_client.py
@@ -21,13 +21,10 @@
from .operations import DataBoxManagementClientOperationsMixin, JobsOperations, Operations, ServiceOperations
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class DataBoxManagementClient(
- DataBoxManagementClientOperationsMixin
-): # pylint: disable=client-accepts-api-version-keyword
+class DataBoxManagementClient(DataBoxManagementClientOperationsMixin):
"""The DataBox Client.
:ivar operations: Operations operations
@@ -42,7 +39,7 @@ class DataBoxManagementClient(
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
- :keyword api_version: Api Version. Default value is "2022-12-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2025-02-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_serialization.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_serialization.py
index 8139854b97bb..b24ab2885450 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_serialization.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -155,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -184,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -206,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -235,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -300,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -326,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -346,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -380,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -395,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -408,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -426,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -448,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -501,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer: # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -540,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -560,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -592,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -633,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -664,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -703,7 +767,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -712,9 +776,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -728,21 +794,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -759,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -780,21 +846,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -805,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -821,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -841,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -871,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -882,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
- :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -945,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -971,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -979,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1003,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1034,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1091,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1105,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1141,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1172,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1186,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1211,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1232,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1279,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1331,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1354,7 +1441,7 @@ def xml_key_extractor(attr, attr_desc, data):
return children[0]
-class Deserializer(object):
+class Deserializer:
"""Response object model deserializer.
:param dict classes: Class type dictionary for deserializing complex types.
@@ -1363,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1403,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1416,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1440,13 +1529,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1476,9 +1565,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1505,6 +1593,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1516,7 +1606,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1531,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1552,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1579,24 +1673,35 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1605,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1627,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1647,14 +1757,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1671,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1681,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1720,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1732,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1743,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1768,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1781,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1794,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1804,9 +1916,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1822,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1834,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1849,8 +1963,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1865,6 +1980,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1877,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1887,14 +2004,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1910,6 +2027,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1924,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1939,14 +2058,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1976,8 +2095,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1985,6 +2103,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1996,5 +2115,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_vendor.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_vendor.py
index cb83e222aa65..77cb1c90bb65 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_vendor.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/_vendor.py
@@ -11,7 +11,6 @@
from ._configuration import DataBoxManagementClientConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import PipelineClient
from ._serialization import Deserializer, Serializer
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/__init__.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/__init__.py
index 955dbd6c6677..f6e63b92052f 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/__init__.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._data_box_management_client import DataBoxManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._data_box_management_client import DataBoxManagementClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"DataBoxManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_configuration.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_configuration.py
index 80fd16c751a5..10d8ee9e5d23 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_configuration.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_configuration.py
@@ -14,11 +14,10 @@
from .._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class DataBoxManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class DataBoxManagementClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for DataBoxManagementClient.
Note that all parameters used to create this instance are saved as instance
@@ -28,13 +27,13 @@ class DataBoxManagementClientConfiguration: # pylint: disable=too-many-instance
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The Subscription Id. Required.
:type subscription_id: str
- :keyword api_version: Api Version. Default value is "2022-12-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2025-02-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
- api_version: str = kwargs.pop("api_version", "2022-12-01")
+ api_version: str = kwargs.pop("api_version", "2025-02-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_data_box_management_client.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_data_box_management_client.py
index 0b484eb1e31b..87b3f4859c2e 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_data_box_management_client.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_data_box_management_client.py
@@ -21,13 +21,10 @@
from .operations import DataBoxManagementClientOperationsMixin, JobsOperations, Operations, ServiceOperations
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class DataBoxManagementClient(
- DataBoxManagementClientOperationsMixin
-): # pylint: disable=client-accepts-api-version-keyword
+class DataBoxManagementClient(DataBoxManagementClientOperationsMixin):
"""The DataBox Client.
:ivar operations: Operations operations
@@ -42,7 +39,7 @@ class DataBoxManagementClient(
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
- :keyword api_version: Api Version. Default value is "2022-12-01". Note that overriding this
+ :keyword api_version: Api Version. Default value is "2025-02-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_vendor.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_vendor.py
index b75cb04eacbd..9f207543d353 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_vendor.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/_vendor.py
@@ -11,7 +11,6 @@
from ._configuration import DataBoxManagementClientConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import AsyncPipelineClient
from .._serialization import Deserializer, Serializer
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/__init__.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/__init__.py
index d44a1996902a..3e73db476be2 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/__init__.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/__init__.py
@@ -5,14 +5,20 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import Operations
-from ._jobs_operations import JobsOperations
-from ._data_box_management_client_operations import DataBoxManagementClientOperationsMixin
-from ._service_operations import ServiceOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import Operations # type: ignore
+from ._jobs_operations import JobsOperations # type: ignore
+from ._data_box_management_client_operations import DataBoxManagementClientOperationsMixin # type: ignore
+from ._service_operations import ServiceOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -21,5 +27,5 @@
"DataBoxManagementClientOperationsMixin",
"ServiceOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_data_box_management_client_operations.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_data_box_management_client_operations.py
index c6a594c237d7..d892e707bc95 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_data_box_management_client_operations.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_data_box_management_client_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -31,7 +30,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -39,7 +38,7 @@
class DataBoxManagementClientOperationsMixin(DataBoxManagementClientMixinABC):
@overload
- async def mitigate( # pylint: disable=inconsistent-return-statements
+ async def mitigate(
self,
job_name: str,
resource_group_name: str,
@@ -67,7 +66,7 @@ async def mitigate( # pylint: disable=inconsistent-return-statements
"""
@overload
- async def mitigate( # pylint: disable=inconsistent-return-statements
+ async def mitigate(
self,
job_name: str,
resource_group_name: str,
@@ -95,7 +94,7 @@ async def mitigate( # pylint: disable=inconsistent-return-statements
"""
@distributed_trace_async
- async def mitigate( # pylint: disable=inconsistent-return-statements
+ async def mitigate(
self,
job_name: str,
resource_group_name: str,
@@ -117,7 +116,7 @@ async def mitigate( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_jobs_operations.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_jobs_operations.py
index a848bf492053..f59b415e8db5 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_jobs_operations.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_jobs_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -48,7 +48,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -89,7 +89,7 @@ def list(self, skip_token: Optional[str] = None, **kwargs: Any) -> AsyncIterable
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.JobResourceList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -152,7 +152,7 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
@overload
- async def mark_devices_shipped( # pylint: disable=inconsistent-return-statements
+ async def mark_devices_shipped(
self,
job_name: str,
resource_group_name: str,
@@ -180,7 +180,7 @@ async def mark_devices_shipped( # pylint: disable=inconsistent-return-statement
"""
@overload
- async def mark_devices_shipped( # pylint: disable=inconsistent-return-statements
+ async def mark_devices_shipped(
self,
job_name: str,
resource_group_name: str,
@@ -208,7 +208,7 @@ async def mark_devices_shipped( # pylint: disable=inconsistent-return-statement
"""
@distributed_trace_async
- async def mark_devices_shipped( # pylint: disable=inconsistent-return-statements
+ async def mark_devices_shipped(
self,
job_name: str,
resource_group_name: str,
@@ -231,7 +231,7 @@ async def mark_devices_shipped( # pylint: disable=inconsistent-return-statement
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -303,7 +303,7 @@ def list_by_resource_group(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.JobResourceList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -385,7 +385,7 @@ async def get(
:rtype: ~azure.mgmt.databox.models.JobResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -436,7 +436,7 @@ async def _create_initial(
job_resource: Union[_models.JobResource, IO[bytes]],
**kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -629,7 +629,7 @@ def get_long_running_output(pipeline_response):
)
async def _delete_initial(self, resource_group_name: str, job_name: str, **kwargs: Any) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -743,7 +743,7 @@ async def _update_initial(
if_match: Optional[str] = None,
**kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1030,7 +1030,7 @@ async def book_shipment_pick_up(
:rtype: ~azure.mgmt.databox.models.ShipmentPickUpResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1086,7 +1086,7 @@ async def book_shipment_pick_up(
return deserialized # type: ignore
@overload
- async def cancel( # pylint: disable=inconsistent-return-statements
+ async def cancel(
self,
resource_group_name: str,
job_name: str,
@@ -1114,7 +1114,7 @@ async def cancel( # pylint: disable=inconsistent-return-statements
"""
@overload
- async def cancel( # pylint: disable=inconsistent-return-statements
+ async def cancel(
self,
resource_group_name: str,
job_name: str,
@@ -1142,7 +1142,7 @@ async def cancel( # pylint: disable=inconsistent-return-statements
"""
@distributed_trace_async
- async def cancel( # pylint: disable=inconsistent-return-statements
+ async def cancel(
self,
resource_group_name: str,
job_name: str,
@@ -1164,7 +1164,7 @@ async def cancel( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1239,7 +1239,7 @@ def list_credentials(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.UnencryptedCredentialsList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_operations.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_operations.py
index b35a258ce157..88f8b5f73c3e 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_operations.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
-from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -31,7 +30,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -69,7 +68,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_service_operations.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_service_operations.py
index ae04046ce90b..d7d6b908cd02 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_service_operations.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/aio/operations/_service_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -40,7 +39,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -147,7 +146,7 @@ def list_available_skus_by_resource_group(
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.AvailableSkusResult] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -279,7 +278,7 @@ async def validate_address(
:rtype: ~azure.mgmt.databox.models.AddressValidationOutput
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -406,7 +405,7 @@ async def validate_inputs_by_resource_group(
:rtype: ~azure.mgmt.databox.models.ValidationResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -517,7 +516,7 @@ async def validate_inputs(
:rtype: ~azure.mgmt.databox.models.ValidationResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -641,7 +640,7 @@ async def region_configuration(
:rtype: ~azure.mgmt.databox.models.RegionConfigurationResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -775,7 +774,7 @@ async def region_configuration_by_resource_group(
:rtype: ~azure.mgmt.databox.models.RegionConfigurationResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/__init__.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/__init__.py
index d2ac96b6f12e..9b58e393f8f7 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/__init__.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/__init__.py
@@ -5,164 +5,182 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._models_py3 import AccountCredentialDetails
-from ._models_py3 import AdditionalErrorInfo
-from ._models_py3 import AddressValidationOutput
-from ._models_py3 import AddressValidationProperties
-from ._models_py3 import ApiError
-from ._models_py3 import ApplianceNetworkConfiguration
-from ._models_py3 import ArmBaseObject
-from ._models_py3 import AvailableSkuRequest
-from ._models_py3 import AvailableSkusResult
-from ._models_py3 import AzureFileFilterDetails
-from ._models_py3 import BlobFilterDetails
-from ._models_py3 import CancellationReason
-from ._models_py3 import CloudError
-from ._models_py3 import ContactDetails
-from ._models_py3 import ContactInfo
-from ._models_py3 import CopyLogDetails
-from ._models_py3 import CopyProgress
-from ._models_py3 import CreateJobValidations
-from ._models_py3 import CreateOrderLimitForSubscriptionValidationRequest
-from ._models_py3 import CreateOrderLimitForSubscriptionValidationResponseProperties
-from ._models_py3 import CustomerDiskJobSecrets
-from ._models_py3 import DataAccountDetails
-from ._models_py3 import DataBoxAccountCopyLogDetails
-from ._models_py3 import DataBoxCustomerDiskCopyLogDetails
-from ._models_py3 import DataBoxCustomerDiskCopyProgress
-from ._models_py3 import DataBoxCustomerDiskJobDetails
-from ._models_py3 import DataBoxDiskCopyLogDetails
-from ._models_py3 import DataBoxDiskCopyProgress
-from ._models_py3 import DataBoxDiskGranularCopyLogDetails
-from ._models_py3 import DataBoxDiskGranularCopyProgress
-from ._models_py3 import DataBoxDiskJobDetails
-from ._models_py3 import DataBoxDiskJobSecrets
-from ._models_py3 import DataBoxHeavyAccountCopyLogDetails
-from ._models_py3 import DataBoxHeavyJobDetails
-from ._models_py3 import DataBoxHeavyJobSecrets
-from ._models_py3 import DataBoxHeavySecret
-from ._models_py3 import DataBoxJobDetails
-from ._models_py3 import DataBoxScheduleAvailabilityRequest
-from ._models_py3 import DataBoxSecret
-from ._models_py3 import DataExportDetails
-from ._models_py3 import DataImportDetails
-from ._models_py3 import DataLocationToServiceLocationMap
-from ._models_py3 import DataTransferDetailsValidationRequest
-from ._models_py3 import DataTransferDetailsValidationResponseProperties
-from ._models_py3 import DataboxJobSecrets
-from ._models_py3 import DatacenterAddressInstructionResponse
-from ._models_py3 import DatacenterAddressLocationResponse
-from ._models_py3 import DatacenterAddressRequest
-from ._models_py3 import DatacenterAddressResponse
-from ._models_py3 import DcAccessSecurityCode
-from ._models_py3 import Details
-from ._models_py3 import DeviceErasureDetails
-from ._models_py3 import DiskScheduleAvailabilityRequest
-from ._models_py3 import DiskSecret
-from ._models_py3 import EncryptionPreferences
-from ._models_py3 import ErrorDetail
-from ._models_py3 import ExportDiskDetails
-from ._models_py3 import FilterFileDetails
-from ._models_py3 import GranularCopyLogDetails
-from ._models_py3 import GranularCopyProgress
-from ._models_py3 import HeavyScheduleAvailabilityRequest
-from ._models_py3 import IdentityProperties
-from ._models_py3 import ImportDiskDetails
-from ._models_py3 import JobDeliveryInfo
-from ._models_py3 import JobDetails
-from ._models_py3 import JobResource
-from ._models_py3 import JobResourceList
-from ._models_py3 import JobResourceUpdateParameter
-from ._models_py3 import JobSecrets
-from ._models_py3 import JobStages
-from ._models_py3 import KeyEncryptionKey
-from ._models_py3 import LastMitigationActionOnJob
-from ._models_py3 import ManagedDiskDetails
-from ._models_py3 import MarkDevicesShippedRequest
-from ._models_py3 import MitigateJobRequest
-from ._models_py3 import NotificationPreference
-from ._models_py3 import Operation
-from ._models_py3 import OperationDisplay
-from ._models_py3 import OperationList
-from ._models_py3 import PackageCarrierDetails
-from ._models_py3 import PackageCarrierInfo
-from ._models_py3 import PackageShippingDetails
-from ._models_py3 import Preferences
-from ._models_py3 import PreferencesValidationRequest
-from ._models_py3 import PreferencesValidationResponseProperties
-from ._models_py3 import RegionConfigurationRequest
-from ._models_py3 import RegionConfigurationResponse
-from ._models_py3 import Resource
-from ._models_py3 import ResourceIdentity
-from ._models_py3 import ReverseShippingDetails
-from ._models_py3 import ScheduleAvailabilityRequest
-from ._models_py3 import ScheduleAvailabilityResponse
-from ._models_py3 import ShareCredentialDetails
-from ._models_py3 import ShipmentPickUpRequest
-from ._models_py3 import ShipmentPickUpResponse
-from ._models_py3 import ShippingAddress
-from ._models_py3 import Sku
-from ._models_py3 import SkuAvailabilityValidationRequest
-from ._models_py3 import SkuAvailabilityValidationResponseProperties
-from ._models_py3 import SkuCapacity
-from ._models_py3 import SkuCost
-from ._models_py3 import SkuInformation
-from ._models_py3 import StorageAccountDetails
-from ._models_py3 import SubscriptionIsAllowedToCreateJobValidationRequest
-from ._models_py3 import SubscriptionIsAllowedToCreateJobValidationResponseProperties
-from ._models_py3 import SystemData
-from ._models_py3 import TransferAllDetails
-from ._models_py3 import TransferConfiguration
-from ._models_py3 import TransferConfigurationTransferAllDetails
-from ._models_py3 import TransferConfigurationTransferFilterDetails
-from ._models_py3 import TransferFilterDetails
-from ._models_py3 import TransportAvailabilityDetails
-from ._models_py3 import TransportAvailabilityRequest
-from ._models_py3 import TransportAvailabilityResponse
-from ._models_py3 import TransportPreferences
-from ._models_py3 import UnencryptedCredentials
-from ._models_py3 import UnencryptedCredentialsList
-from ._models_py3 import UpdateJobDetails
-from ._models_py3 import UserAssignedIdentity
-from ._models_py3 import UserAssignedProperties
-from ._models_py3 import ValidateAddress
-from ._models_py3 import ValidationInputRequest
-from ._models_py3 import ValidationInputResponse
-from ._models_py3 import ValidationRequest
-from ._models_py3 import ValidationResponse
+from typing import TYPE_CHECKING
-from ._data_box_management_client_enums import AccessProtocol
-from ._data_box_management_client_enums import AddressType
-from ._data_box_management_client_enums import AddressValidationStatus
-from ._data_box_management_client_enums import ClassDiscriminator
-from ._data_box_management_client_enums import CopyStatus
-from ._data_box_management_client_enums import CustomerResolutionCode
-from ._data_box_management_client_enums import DataAccountType
-from ._data_box_management_client_enums import DataCenterCode
-from ._data_box_management_client_enums import DatacenterAddressType
-from ._data_box_management_client_enums import DoubleEncryption
-from ._data_box_management_client_enums import FilterFileType
-from ._data_box_management_client_enums import HardwareEncryption
-from ._data_box_management_client_enums import JobDeliveryType
-from ._data_box_management_client_enums import KekType
-from ._data_box_management_client_enums import LogCollectionLevel
-from ._data_box_management_client_enums import NotificationStageName
-from ._data_box_management_client_enums import OverallValidationStatus
-from ._data_box_management_client_enums import ReverseShippingDetailsEditStatus
-from ._data_box_management_client_enums import ReverseTransportPreferenceEditStatus
-from ._data_box_management_client_enums import ShareDestinationFormatType
-from ._data_box_management_client_enums import SkuDisabledReason
-from ._data_box_management_client_enums import SkuName
-from ._data_box_management_client_enums import StageName
-from ._data_box_management_client_enums import StageStatus
-from ._data_box_management_client_enums import TransferConfigurationType
-from ._data_box_management_client_enums import TransferType
-from ._data_box_management_client_enums import TransportShipmentTypes
-from ._data_box_management_client_enums import ValidationInputDiscriminator
-from ._data_box_management_client_enums import ValidationStatus
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models_py3 import ( # type: ignore
+ AccountCredentialDetails,
+ AdditionalErrorInfo,
+ AddressValidationOutput,
+ AddressValidationProperties,
+ ApiError,
+ ApplianceNetworkConfiguration,
+ ArmBaseObject,
+ AvailableSkuRequest,
+ AvailableSkusResult,
+ AzureFileFilterDetails,
+ BlobFilterDetails,
+ CancellationReason,
+ CloudError,
+ ContactDetails,
+ ContactInfo,
+ CopyLogDetails,
+ CopyProgress,
+ CreateJobValidations,
+ CreateOrderLimitForSubscriptionValidationRequest,
+ CreateOrderLimitForSubscriptionValidationResponseProperties,
+ CustomerDiskJobSecrets,
+ DataAccountDetails,
+ DataBoxAccountCopyLogDetails,
+ DataBoxCustomerDiskCopyLogDetails,
+ DataBoxCustomerDiskCopyProgress,
+ DataBoxCustomerDiskJobDetails,
+ DataBoxDiskCopyLogDetails,
+ DataBoxDiskCopyProgress,
+ DataBoxDiskGranularCopyLogDetails,
+ DataBoxDiskGranularCopyProgress,
+ DataBoxDiskJobDetails,
+ DataBoxDiskJobSecrets,
+ DataBoxHeavyAccountCopyLogDetails,
+ DataBoxHeavyJobDetails,
+ DataBoxHeavyJobSecrets,
+ DataBoxHeavySecret,
+ DataBoxJobDetails,
+ DataBoxScheduleAvailabilityRequest,
+ DataBoxSecret,
+ DataExportDetails,
+ DataImportDetails,
+ DataLocationToServiceLocationMap,
+ DataTransferDetailsValidationRequest,
+ DataTransferDetailsValidationResponseProperties,
+ DataboxJobSecrets,
+ DatacenterAddressInstructionResponse,
+ DatacenterAddressLocationResponse,
+ DatacenterAddressRequest,
+ DatacenterAddressResponse,
+ DcAccessSecurityCode,
+ Details,
+ DeviceCapabilityDetails,
+ DeviceCapabilityRequest,
+ DeviceCapabilityResponse,
+ DeviceErasureDetails,
+ DiskScheduleAvailabilityRequest,
+ DiskSecret,
+ EncryptionPreferences,
+ ErrorDetail,
+ ExportDiskDetails,
+ FilterFileDetails,
+ GranularCopyLogDetails,
+ GranularCopyProgress,
+ HeavyScheduleAvailabilityRequest,
+ IdentityProperties,
+ ImportDiskDetails,
+ JobDelayDetails,
+ JobDeliveryInfo,
+ JobDetails,
+ JobResource,
+ JobResourceList,
+ JobResourceUpdateParameter,
+ JobSecrets,
+ JobStages,
+ KeyEncryptionKey,
+ LastMitigationActionOnJob,
+ ManagedDiskDetails,
+ MarkDevicesShippedRequest,
+ MitigateJobRequest,
+ NotificationPreference,
+ Operation,
+ OperationDisplay,
+ OperationList,
+ PackageCarrierDetails,
+ PackageCarrierInfo,
+ PackageShippingDetails,
+ Preferences,
+ PreferencesValidationRequest,
+ PreferencesValidationResponseProperties,
+ RegionConfigurationRequest,
+ RegionConfigurationResponse,
+ Resource,
+ ResourceIdentity,
+ ReverseShippingDetails,
+ ScheduleAvailabilityRequest,
+ ScheduleAvailabilityResponse,
+ ShareCredentialDetails,
+ ShipmentPickUpRequest,
+ ShipmentPickUpResponse,
+ ShippingAddress,
+ Sku,
+ SkuAvailabilityValidationRequest,
+ SkuAvailabilityValidationResponseProperties,
+ SkuCapacity,
+ SkuCost,
+ SkuInformation,
+ StorageAccountDetails,
+ SubscriptionIsAllowedToCreateJobValidationRequest,
+ SubscriptionIsAllowedToCreateJobValidationResponseProperties,
+ SystemData,
+ TransferAllDetails,
+ TransferConfiguration,
+ TransferConfigurationTransferAllDetails,
+ TransferConfigurationTransferFilterDetails,
+ TransferFilterDetails,
+ TransportAvailabilityDetails,
+ TransportAvailabilityRequest,
+ TransportAvailabilityResponse,
+ TransportPreferences,
+ UnencryptedCredentials,
+ UnencryptedCredentialsList,
+ UpdateJobDetails,
+ UserAssignedIdentity,
+ UserAssignedProperties,
+ ValidateAddress,
+ ValidationInputRequest,
+ ValidationInputResponse,
+ ValidationRequest,
+ ValidationResponse,
+)
+
+from ._data_box_management_client_enums import ( # type: ignore
+ AccessProtocol,
+ AddressType,
+ AddressValidationStatus,
+ ClassDiscriminator,
+ CopyStatus,
+ CustomerResolutionCode,
+ DataAccountType,
+ DataCenterCode,
+ DatacenterAddressType,
+ DelayNotificationStatus,
+ DoubleEncryption,
+ FilterFileType,
+ HardwareEncryption,
+ JobDeliveryType,
+ KekType,
+ LogCollectionLevel,
+ ModelName,
+ NotificationStageName,
+ OverallValidationStatus,
+ PortalDelayErrorCode,
+ ReverseShippingDetailsEditStatus,
+ ReverseTransportPreferenceEditStatus,
+ ShareDestinationFormatType,
+ SkuDisabledReason,
+ SkuName,
+ StageName,
+ StageStatus,
+ TransferConfigurationType,
+ TransferType,
+ TransportShipmentTypes,
+ ValidationInputDiscriminator,
+ ValidationStatus,
+)
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -217,6 +235,9 @@
"DatacenterAddressResponse",
"DcAccessSecurityCode",
"Details",
+ "DeviceCapabilityDetails",
+ "DeviceCapabilityRequest",
+ "DeviceCapabilityResponse",
"DeviceErasureDetails",
"DiskScheduleAvailabilityRequest",
"DiskSecret",
@@ -229,6 +250,7 @@
"HeavyScheduleAvailabilityRequest",
"IdentityProperties",
"ImportDiskDetails",
+ "JobDelayDetails",
"JobDeliveryInfo",
"JobDetails",
"JobResource",
@@ -300,14 +322,17 @@
"DataAccountType",
"DataCenterCode",
"DatacenterAddressType",
+ "DelayNotificationStatus",
"DoubleEncryption",
"FilterFileType",
"HardwareEncryption",
"JobDeliveryType",
"KekType",
"LogCollectionLevel",
+ "ModelName",
"NotificationStageName",
"OverallValidationStatus",
+ "PortalDelayErrorCode",
"ReverseShippingDetailsEditStatus",
"ReverseTransportPreferenceEditStatus",
"ShareDestinationFormatType",
@@ -321,5 +346,5 @@
"ValidationInputDiscriminator",
"ValidationStatus",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/_data_box_management_client_enums.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/_data_box_management_client_enums.py
index 0795de2c3514..542e536a1106 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/_data_box_management_client_enums.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/_data_box_management_client_enums.py
@@ -190,6 +190,24 @@ class DataCenterCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
BN7 = "BN7"
SN6 = "SN6"
BJS20 = "BJS20"
+ BL24 = "BL24"
+ IDC5 = "IDC5"
+ TYO23 = "TYO23"
+ CPQ21 = "CPQ21"
+ NTG20 = "NTG20"
+ DXB23 = "DXB23"
+ DSM11 = "DSM11"
+ OSA23 = "OSA23"
+ AMS25 = "AMS25"
+
+
+class DelayNotificationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Status of notification."""
+
+ ACTIVE = "Active"
+ """Delay is still active"""
+ RESOLVED = "Resolved"
+ """Delay has been resolved"""
class DoubleEncryption(str, Enum, metaclass=CaseInsensitiveEnumMeta):
@@ -211,12 +229,12 @@ class FilterFileType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
class HardwareEncryption(str, Enum, metaclass=CaseInsensitiveEnumMeta):
- """Defines Hardware level encryption (Only for disk)."""
+ """Hardware encryption support for a given sku for a given region."""
ENABLED = "Enabled"
"""Hardware-based encryption is enabled."""
DISABLED = "Disabled"
- """Hardware-based encryption is enabled."""
+ """Hardware-based encryption is disabled."""
class JobDeliveryType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
@@ -246,6 +264,26 @@ class LogCollectionLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Verbose logging (includes Errors, CRC, size information and others)."""
+class ModelName(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The customer friendly name of the combination of version and capacity of the device. This field
+ is necessary only at the time of ordering the newer generation device i.e. AzureDataBox120 and
+ AzureDataBox525 as of Feb/2025.
+ """
+
+ DATA_BOX = "DataBox"
+ """Data Box."""
+ DATA_BOX_DISK = "DataBoxDisk"
+ """Data Box Disk."""
+ DATA_BOX_HEAVY = "DataBoxHeavy"
+ """Data Box Heavy."""
+ DATA_BOX_CUSTOMER_DISK = "DataBoxCustomerDisk"
+ """Data Box Customer Disk"""
+ AZURE_DATA_BOX120 = "AzureDataBox120"
+ """Data Box V2 with 125TB usable capacity."""
+ AZURE_DATA_BOX525 = "AzureDataBox525"
+ """Data Box V2 with 500TB usable capacity."""
+
+
class NotificationStageName(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Name of the stage."""
@@ -278,6 +316,19 @@ class OverallValidationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Certain input validations skipped."""
+class PortalDelayErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Delay Error code."""
+
+ INTERNAL_ISSUE_DELAY = "InternalIssueDelay"
+ """Delay due to any internal reasons"""
+ ACTIVE_ORDER_LIMIT_BREACHED_DELAY = "ActiveOrderLimitBreachedDelay"
+ """Active Order limit breached."""
+ HIGH_DEMAND_DELAY = "HighDemandDelay"
+ """High demand"""
+ LARGE_NUMBER_OF_FILES_DELAY = "LargeNumberOfFilesDelay"
+ """Slow copy due to large number of files"""
+
+
class ReverseShippingDetailsEditStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The Editable status for Reverse Shipping Address and Contact Info."""
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/_models_py3.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/_models_py3.py
index ed580288b3ed..96b7fb3230b8 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/_models_py3.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/models/_models_py3.py
@@ -1,5 +1,5 @@
-# coding=utf-8
# pylint: disable=too-many-lines
+# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
@@ -16,10 +16,9 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
@@ -711,7 +710,7 @@ def __init__(self, **kwargs: Any) -> None:
self.copy_log_details_type: Optional[str] = None
-class CopyProgress(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class CopyProgress(_serialization.Model):
"""Copy progress.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -946,6 +945,11 @@ class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest):
:ivar device_type: Device type to be used for the job. Required. Known values are: "DataBox",
"DataBoxDisk", "DataBoxHeavy", and "DataBoxCustomerDisk".
:vartype device_type: str or ~azure.mgmt.databox.models.SkuName
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -956,17 +960,30 @@ class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest):
_attribute_map = {
"validation_type": {"key": "validationType", "type": "str"},
"device_type": {"key": "deviceType", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
- def __init__(self, *, device_type: Union[str, "_models.SkuName"], **kwargs: Any) -> None:
+ def __init__(
+ self,
+ *,
+ device_type: Union[str, "_models.SkuName"],
+ model: Optional[Union[str, "_models.ModelName"]] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword device_type: Device type to be used for the job. Required. Known values are:
"DataBox", "DataBoxDisk", "DataBoxHeavy", and "DataBoxCustomerDisk".
:paramtype device_type: str or ~azure.mgmt.databox.models.SkuName
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.validation_type: str = "ValidateCreateOrderLimit"
self.device_type = device_type
+ self.model = model
class CreateOrderLimitForSubscriptionValidationResponseProperties(
@@ -1231,7 +1248,7 @@ def __init__(self, **kwargs: Any) -> None:
self.verbose_log_link = None
-class DataBoxCustomerDiskCopyProgress(CopyProgress): # pylint: disable=too-many-instance-attributes
+class DataBoxCustomerDiskCopyProgress(CopyProgress):
"""DataBox CustomerDisk Copy Progress.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -1338,7 +1355,7 @@ def __init__(self, **kwargs: Any) -> None:
self.copy_status = None
-class JobDetails(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class JobDetails(_serialization.Model):
"""Job details.
You probably want to use the sub-classes and not this class directly. Known sub-classes are:
@@ -1395,7 +1412,7 @@ class JobDetails(_serialization.Model): # pylint: disable=too-many-instance-att
"YTO21", "YQB20", "FRA22", "MAA01", "CPQ02", "CPQ20", "SIN20", "HKG20", "SG2", "MEL23",
"SEL21", "OSA20", "SHA03", "BJB", "JNB22", "JNB21", "MNZ21", "SN8", "AUH20", "ZRH20", "PUS20",
"AdHoc", "CH1", "DSM05", "DUB07", "PNQ01", "SVG20", "OSA02", "OSA22", "PAR22", "BN7", "SN6",
- and "BJS20".
+ "BJS20", "BL24", "IDC5", "TYO23", "CPQ21", "NTG20", "DXB23", "DSM11", "OSA23", and "AMS25".
:vartype data_center_code: str or ~azure.mgmt.databox.models.DataCenterCode
"""
@@ -1502,7 +1519,7 @@ def __init__(
self.data_center_code = None
-class DataBoxCustomerDiskJobDetails(JobDetails): # pylint: disable=too-many-instance-attributes
+class DataBoxCustomerDiskJobDetails(JobDetails):
"""Customer disk job details.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -1556,7 +1573,7 @@ class DataBoxCustomerDiskJobDetails(JobDetails): # pylint: disable=too-many-ins
"YTO21", "YQB20", "FRA22", "MAA01", "CPQ02", "CPQ20", "SIN20", "HKG20", "SG2", "MEL23",
"SEL21", "OSA20", "SHA03", "BJB", "JNB22", "JNB21", "MNZ21", "SN8", "AUH20", "ZRH20", "PUS20",
"AdHoc", "CH1", "DSM05", "DUB07", "PNQ01", "SVG20", "OSA02", "OSA22", "PAR22", "BN7", "SN6",
- and "BJS20".
+ "BJS20", "BL24", "IDC5", "TYO23", "CPQ21", "NTG20", "DXB23", "DSM11", "OSA23", and "AMS25".
:vartype data_center_code: str or ~azure.mgmt.databox.models.DataCenterCode
:ivar import_disk_details_collection: Contains the map of disk serial number to the disk
details for import jobs.
@@ -1861,7 +1878,7 @@ def __init__(self, **kwargs: Any) -> None:
self.verbose_log_link = None
-class GranularCopyProgress(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class GranularCopyProgress(_serialization.Model):
"""Granular Copy progress.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -1971,7 +1988,7 @@ def __init__(self, **kwargs: Any) -> None:
self.actions = None
-class DataBoxDiskGranularCopyProgress(GranularCopyProgress): # pylint: disable=too-many-instance-attributes
+class DataBoxDiskGranularCopyProgress(GranularCopyProgress):
"""DataBox Disk Granular Copy Progress.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2078,7 +2095,7 @@ def __init__(self, **kwargs: Any) -> None:
self.copy_status = None
-class DataBoxDiskJobDetails(JobDetails): # pylint: disable=too-many-instance-attributes
+class DataBoxDiskJobDetails(JobDetails):
"""DataBox Disk Job Details.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2132,7 +2149,7 @@ class DataBoxDiskJobDetails(JobDetails): # pylint: disable=too-many-instance-at
"YTO21", "YQB20", "FRA22", "MAA01", "CPQ02", "CPQ20", "SIN20", "HKG20", "SG2", "MEL23",
"SEL21", "OSA20", "SHA03", "BJB", "JNB22", "JNB21", "MNZ21", "SN8", "AUH20", "ZRH20", "PUS20",
"AdHoc", "CH1", "DSM05", "DUB07", "PNQ01", "SVG20", "OSA02", "OSA22", "PAR22", "BN7", "SN6",
- and "BJS20".
+ "BJS20", "BL24", "IDC5", "TYO23", "CPQ21", "NTG20", "DXB23", "DSM11", "OSA23", and "AMS25".
:vartype data_center_code: str or ~azure.mgmt.databox.models.DataCenterCode
:ivar preferred_disks: User preference on what size disks are needed for the job. The map is
from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but
@@ -2353,7 +2370,7 @@ def __init__(self, **kwargs: Any) -> None:
self.copy_verbose_log_link = None
-class DataBoxHeavyJobDetails(JobDetails): # pylint: disable=too-many-instance-attributes
+class DataBoxHeavyJobDetails(JobDetails):
"""Databox Heavy Device Job Details.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2407,7 +2424,7 @@ class DataBoxHeavyJobDetails(JobDetails): # pylint: disable=too-many-instance-a
"YTO21", "YQB20", "FRA22", "MAA01", "CPQ02", "CPQ20", "SIN20", "HKG20", "SG2", "MEL23",
"SEL21", "OSA20", "SHA03", "BJB", "JNB22", "JNB21", "MNZ21", "SN8", "AUH20", "ZRH20", "PUS20",
"AdHoc", "CH1", "DSM05", "DUB07", "PNQ01", "SVG20", "OSA02", "OSA22", "PAR22", "BN7", "SN6",
- and "BJS20".
+ "BJS20", "BL24", "IDC5", "TYO23", "CPQ21", "NTG20", "DXB23", "DSM11", "OSA23", and "AMS25".
:vartype data_center_code: str or ~azure.mgmt.databox.models.DataCenterCode
:ivar copy_progress: Copy progress per account.
:vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress]
@@ -2601,7 +2618,7 @@ def __init__(self, **kwargs: Any) -> None:
self.account_credential_details = None
-class DataBoxJobDetails(JobDetails): # pylint: disable=too-many-instance-attributes
+class DataBoxJobDetails(JobDetails):
"""Databox Job Details.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -2655,7 +2672,7 @@ class DataBoxJobDetails(JobDetails): # pylint: disable=too-many-instance-attrib
"YTO21", "YQB20", "FRA22", "MAA01", "CPQ02", "CPQ20", "SIN20", "HKG20", "SG2", "MEL23",
"SEL21", "OSA20", "SHA03", "BJB", "JNB22", "JNB21", "MNZ21", "SN8", "AUH20", "ZRH20", "PUS20",
"AdHoc", "CH1", "DSM05", "DUB07", "PNQ01", "SVG20", "OSA02", "OSA22", "PAR22", "BN7", "SN6",
- and "BJS20".
+ "BJS20", "BL24", "IDC5", "TYO23", "CPQ21", "NTG20", "DXB23", "DSM11", "OSA23", and "AMS25".
:vartype data_center_code: str or ~azure.mgmt.databox.models.DataCenterCode
:ivar copy_progress: Copy progress per storage account.
:vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress]
@@ -2825,6 +2842,11 @@ class ScheduleAvailabilityRequest(_serialization.Model):
:vartype sku_name: str or ~azure.mgmt.databox.models.SkuName
:ivar country: Country in which storage location should be supported.
:vartype country: str
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -2836,6 +2858,7 @@ class ScheduleAvailabilityRequest(_serialization.Model):
"storage_location": {"key": "storageLocation", "type": "str"},
"sku_name": {"key": "skuName", "type": "str"},
"country": {"key": "country", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
_subtype_map = {
@@ -2846,7 +2869,14 @@ class ScheduleAvailabilityRequest(_serialization.Model):
}
}
- def __init__(self, *, storage_location: str, country: Optional[str] = None, **kwargs: Any) -> None:
+ def __init__(
+ self,
+ *,
+ storage_location: str,
+ country: Optional[str] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword storage_location: Location for data transfer. For locations check:
https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01.
@@ -2854,11 +2884,17 @@ def __init__(self, *, storage_location: str, country: Optional[str] = None, **kw
:paramtype storage_location: str
:keyword country: Country in which storage location should be supported.
:paramtype country: str
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.storage_location = storage_location
self.sku_name: Optional[str] = None
self.country = country
+ self.model = model
class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest):
@@ -2875,6 +2911,11 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest):
:vartype sku_name: str or ~azure.mgmt.databox.models.SkuName
:ivar country: Country in which storage location should be supported.
:vartype country: str
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -2886,9 +2927,17 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest):
"storage_location": {"key": "storageLocation", "type": "str"},
"sku_name": {"key": "skuName", "type": "str"},
"country": {"key": "country", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
- def __init__(self, *, storage_location: str, country: Optional[str] = None, **kwargs: Any) -> None:
+ def __init__(
+ self,
+ *,
+ storage_location: str,
+ country: Optional[str] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword storage_location: Location for data transfer. For locations check:
https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01.
@@ -2896,8 +2945,13 @@ def __init__(self, *, storage_location: str, country: Optional[str] = None, **kw
:paramtype storage_location: str
:keyword country: Country in which storage location should be supported.
:paramtype country: str
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
- super().__init__(storage_location=storage_location, country=country, **kwargs)
+ super().__init__(storage_location=storage_location, country=country, model=model, **kwargs)
self.sku_name: str = "DataBox"
@@ -3030,7 +3084,7 @@ def __init__(self, **kwargs: Any) -> None:
self.communication_instruction = None
-class DatacenterAddressLocationResponse(DatacenterAddressResponse): # pylint: disable=too-many-instance-attributes
+class DatacenterAddressLocationResponse(DatacenterAddressResponse):
"""Datacenter address for given storage location.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -3141,6 +3195,11 @@ class DatacenterAddressRequest(_serialization.Model):
:ivar sku_name: Sku Name for which the data center address requested. Required. Known values
are: "DataBox", "DataBoxDisk", "DataBoxHeavy", and "DataBoxCustomerDisk".
:vartype sku_name: str or ~azure.mgmt.databox.models.SkuName
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -3151,9 +3210,17 @@ class DatacenterAddressRequest(_serialization.Model):
_attribute_map = {
"storage_location": {"key": "storageLocation", "type": "str"},
"sku_name": {"key": "skuName", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
- def __init__(self, *, storage_location: str, sku_name: Union[str, "_models.SkuName"], **kwargs: Any) -> None:
+ def __init__(
+ self,
+ *,
+ storage_location: str,
+ sku_name: Union[str, "_models.SkuName"],
+ model: Optional[Union[str, "_models.ModelName"]] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword storage_location: Storage location. For locations check:
https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01.
@@ -3162,10 +3229,16 @@ def __init__(self, *, storage_location: str, sku_name: Union[str, "_models.SkuNa
:keyword sku_name: Sku Name for which the data center address requested. Required. Known values
are: "DataBox", "DataBoxDisk", "DataBoxHeavy", and "DataBoxCustomerDisk".
:paramtype sku_name: str or ~azure.mgmt.databox.models.SkuName
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.storage_location = storage_location
self.sku_name = sku_name
+ self.model = model
class DataExportDetails(_serialization.Model):
@@ -3303,6 +3376,11 @@ class DataTransferDetailsValidationRequest(ValidationInputRequest):
:ivar transfer_type: Type of the transfer. Required. Known values are: "ImportToAzure" and
"ExportFromAzure".
:vartype transfer_type: str or ~azure.mgmt.databox.models.TransferType
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -3317,6 +3395,7 @@ class DataTransferDetailsValidationRequest(ValidationInputRequest):
"data_import_details": {"key": "dataImportDetails", "type": "[DataImportDetails]"},
"device_type": {"key": "deviceType", "type": "str"},
"transfer_type": {"key": "transferType", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
def __init__(
@@ -3326,6 +3405,7 @@ def __init__(
transfer_type: Union[str, "_models.TransferType"],
data_export_details: Optional[List["_models.DataExportDetails"]] = None,
data_import_details: Optional[List["_models.DataImportDetails"]] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
**kwargs: Any
) -> None:
"""
@@ -3340,6 +3420,11 @@ def __init__(
:keyword transfer_type: Type of the transfer. Required. Known values are: "ImportToAzure" and
"ExportFromAzure".
:paramtype transfer_type: str or ~azure.mgmt.databox.models.TransferType
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.validation_type: str = "ValidateDataTransferDetails"
@@ -3347,6 +3432,7 @@ def __init__(
self.data_import_details = data_import_details
self.device_type = device_type
self.transfer_type = transfer_type
+ self.model = model
class DataTransferDetailsValidationResponseProperties(ValidationInputResponse): # pylint: disable=name-too-long
@@ -3451,6 +3537,94 @@ def __init__(self, *, code: str, message: str, **kwargs: Any) -> None:
self.message = message
+class DeviceCapabilityDetails(_serialization.Model):
+ """Device capability details for a given sku for a given region.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar hardware_encryption: Hardware encryption support for a given sku for a given region.
+ Known values are: "Enabled" and "Disabled".
+ :vartype hardware_encryption: str or ~azure.mgmt.databox.models.HardwareEncryption
+ """
+
+ _validation = {
+ "hardware_encryption": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "hardware_encryption": {"key": "hardwareEncryption", "type": "str"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.hardware_encryption = None
+
+
+class DeviceCapabilityRequest(_serialization.Model):
+ """Request body to get the device capabilities for given sku.
+
+ :ivar sku_name: Type of the device. Known values are: "DataBox", "DataBoxDisk", "DataBoxHeavy",
+ and "DataBoxCustomerDisk".
+ :vartype sku_name: str or ~azure.mgmt.databox.models.SkuName
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
+ """
+
+ _attribute_map = {
+ "sku_name": {"key": "skuName", "type": "str"},
+ "model": {"key": "model", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ sku_name: Optional[Union[str, "_models.SkuName"]] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword sku_name: Type of the device. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", and "DataBoxCustomerDisk".
+ :paramtype sku_name: str or ~azure.mgmt.databox.models.SkuName
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
+ """
+ super().__init__(**kwargs)
+ self.sku_name = sku_name
+ self.model = model
+
+
+class DeviceCapabilityResponse(_serialization.Model):
+ """Device capabilities for given sku in a region.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar device_capability_details: List of device capabilities available for a given region and a
+ given sku.
+ :vartype device_capability_details: list[~azure.mgmt.databox.models.DeviceCapabilityDetails]
+ """
+
+ _validation = {
+ "device_capability_details": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "device_capability_details": {"key": "deviceCapabilityDetails", "type": "[DeviceCapabilityDetails]"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.device_capability_details = None
+
+
class DeviceErasureDetails(_serialization.Model):
"""Device erasure details with erasure completion status and erasureordestructionlog sas key.
@@ -3498,6 +3672,11 @@ class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest):
:vartype sku_name: str or ~azure.mgmt.databox.models.SkuName
:ivar country: Country in which storage location should be supported.
:vartype country: str
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
:ivar expected_data_size_in_tera_bytes: The expected size of the data, which needs to be
transferred in this job, in terabytes. Required.
:vartype expected_data_size_in_tera_bytes: int
@@ -3513,6 +3692,7 @@ class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest):
"storage_location": {"key": "storageLocation", "type": "str"},
"sku_name": {"key": "skuName", "type": "str"},
"country": {"key": "country", "type": "str"},
+ "model": {"key": "model", "type": "str"},
"expected_data_size_in_tera_bytes": {"key": "expectedDataSizeInTeraBytes", "type": "int"},
}
@@ -3522,6 +3702,7 @@ def __init__(
storage_location: str,
expected_data_size_in_tera_bytes: int,
country: Optional[str] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
**kwargs: Any
) -> None:
"""
@@ -3531,11 +3712,16 @@ def __init__(
:paramtype storage_location: str
:keyword country: Country in which storage location should be supported.
:paramtype country: str
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
:keyword expected_data_size_in_tera_bytes: The expected size of the data, which needs to be
transferred in this job, in terabytes. Required.
:paramtype expected_data_size_in_tera_bytes: int
"""
- super().__init__(storage_location=storage_location, country=country, **kwargs)
+ super().__init__(storage_location=storage_location, country=country, model=model, **kwargs)
self.sku_name: str = "DataBoxDisk"
self.expected_data_size_in_tera_bytes = expected_data_size_in_tera_bytes
@@ -3745,6 +3931,11 @@ class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest):
:vartype sku_name: str or ~azure.mgmt.databox.models.SkuName
:ivar country: Country in which storage location should be supported.
:vartype country: str
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -3756,9 +3947,17 @@ class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest):
"storage_location": {"key": "storageLocation", "type": "str"},
"sku_name": {"key": "skuName", "type": "str"},
"country": {"key": "country", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
- def __init__(self, *, storage_location: str, country: Optional[str] = None, **kwargs: Any) -> None:
+ def __init__(
+ self,
+ *,
+ storage_location: str,
+ country: Optional[str] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword storage_location: Location for data transfer. For locations check:
https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01.
@@ -3766,8 +3965,13 @@ def __init__(self, *, storage_location: str, country: Optional[str] = None, **kw
:paramtype storage_location: str
:keyword country: Country in which storage location should be supported.
:paramtype country: str
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
- super().__init__(storage_location=storage_location, country=country, **kwargs)
+ super().__init__(storage_location=storage_location, country=country, model=model, **kwargs)
self.sku_name: str = "DataBoxHeavy"
@@ -3851,6 +4055,50 @@ def __init__(self, *, manifest_file: str, manifest_hash: str, bit_locker_key: st
self.backup_manifest_cloud_path = None
+class JobDelayDetails(_serialization.Model):
+ """Job Delay Notification details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar status: Status of notification. Known values are: "Active" and "Resolved".
+ :vartype status: str or ~azure.mgmt.databox.models.DelayNotificationStatus
+ :ivar error_code: Delay Error code. Known values are: "InternalIssueDelay",
+ "ActiveOrderLimitBreachedDelay", "HighDemandDelay", and "LargeNumberOfFilesDelay".
+ :vartype error_code: str or ~azure.mgmt.databox.models.PortalDelayErrorCode
+ :ivar description: Description of the delay.
+ :vartype description: str
+ :ivar start_time: Timestamp when the delay notification was created.
+ :vartype start_time: ~datetime.datetime
+ :ivar resolution_time: Timestamp when the delay notification was resolved.
+ :vartype resolution_time: ~datetime.datetime
+ """
+
+ _validation = {
+ "status": {"readonly": True},
+ "error_code": {"readonly": True},
+ "description": {"readonly": True},
+ "start_time": {"readonly": True},
+ "resolution_time": {"readonly": True},
+ }
+
+ _attribute_map = {
+ "status": {"key": "status", "type": "str"},
+ "error_code": {"key": "errorCode", "type": "str"},
+ "description": {"key": "description", "type": "str"},
+ "start_time": {"key": "startTime", "type": "iso-8601"},
+ "resolution_time": {"key": "resolutionTime", "type": "iso-8601"},
+ }
+
+ def __init__(self, **kwargs: Any) -> None:
+ """ """
+ super().__init__(**kwargs)
+ self.status = None
+ self.error_code = None
+ self.description = None
+ self.start_time = None
+ self.resolution_time = None
+
+
class JobDeliveryInfo(_serialization.Model):
"""Additional delivery info.
@@ -3932,7 +4180,7 @@ def __init__(
self.identity = identity
-class JobResource(Resource): # pylint: disable=too-many-instance-attributes
+class JobResource(Resource):
"""Job Resource.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -3985,6 +4233,13 @@ class JobResource(Resource): # pylint: disable=too-many-instance-attributes
"ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC", "Created", "ShippedToAzureDC",
"AwaitingShipmentDetails", "PreparingToShipFromAzureDC", and "ShippedToCustomer".
:vartype status: str or ~azure.mgmt.databox.models.StageName
+ :ivar delayed_stage: Name of the stage where delay might be present. Known values are:
+ "DeviceOrdered", "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC",
+ "DataCopy", "Completed", "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer",
+ "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings",
+ "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC", "Created", "ShippedToAzureDC",
+ "AwaitingShipmentDetails", "PreparingToShipFromAzureDC", and "ShippedToCustomer".
+ :vartype delayed_stage: str or ~azure.mgmt.databox.models.StageName
:ivar start_time: Time at which the job was started in UTC ISO 8601 format.
:vartype start_time: ~datetime.datetime
:ivar error: Top level error for the job.
@@ -3999,6 +4254,8 @@ class JobResource(Resource): # pylint: disable=too-many-instance-attributes
:vartype delivery_info: ~azure.mgmt.databox.models.JobDeliveryInfo
:ivar is_cancellable_without_fee: Flag to indicate cancellation of scheduled job.
:vartype is_cancellable_without_fee: bool
+ :ivar all_devices_lost: Flag to indicate if all devices associated with the job are lost.
+ :vartype all_devices_lost: bool
"""
_validation = {
@@ -4016,10 +4273,12 @@ class JobResource(Resource): # pylint: disable=too-many-instance-attributes
"reverse_transport_preference_update": {"readonly": True},
"is_prepare_to_ship_enabled": {"readonly": True},
"status": {"readonly": True},
+ "delayed_stage": {"readonly": True},
"start_time": {"readonly": True},
"error": {"readonly": True},
"cancellation_reason": {"readonly": True},
"is_cancellable_without_fee": {"readonly": True},
+ "all_devices_lost": {"readonly": True},
}
_attribute_map = {
@@ -4039,6 +4298,7 @@ class JobResource(Resource): # pylint: disable=too-many-instance-attributes
"reverse_transport_preference_update": {"key": "properties.reverseTransportPreferenceUpdate", "type": "str"},
"is_prepare_to_ship_enabled": {"key": "properties.isPrepareToShipEnabled", "type": "bool"},
"status": {"key": "properties.status", "type": "str"},
+ "delayed_stage": {"key": "properties.delayedStage", "type": "str"},
"start_time": {"key": "properties.startTime", "type": "iso-8601"},
"error": {"key": "properties.error", "type": "CloudError"},
"details": {"key": "properties.details", "type": "JobDetails"},
@@ -4046,9 +4306,10 @@ class JobResource(Resource): # pylint: disable=too-many-instance-attributes
"delivery_type": {"key": "properties.deliveryType", "type": "str"},
"delivery_info": {"key": "properties.deliveryInfo", "type": "JobDeliveryInfo"},
"is_cancellable_without_fee": {"key": "properties.isCancellableWithoutFee", "type": "bool"},
+ "all_devices_lost": {"key": "properties.allDevicesLost", "type": "bool"},
}
- def __init__(
+ def __init__( # pylint: disable=too-many-locals
self,
*,
location: str,
@@ -4097,6 +4358,7 @@ def __init__(
self.reverse_transport_preference_update = None
self.is_prepare_to_ship_enabled = None
self.status = None
+ self.delayed_stage = None
self.start_time = None
self.error = None
self.details = details
@@ -4104,6 +4366,7 @@ def __init__(
self.delivery_type = delivery_type
self.delivery_info = delivery_info
self.is_cancellable_without_fee = None
+ self.all_devices_lost = None
class JobResourceList(_serialization.Model):
@@ -4199,6 +4462,8 @@ class JobStages(_serialization.Model):
:vartype stage_time: ~datetime.datetime
:ivar job_stage_details: Job Stage Details.
:vartype job_stage_details: JSON
+ :ivar delay_information: Delay information for the job stages.
+ :vartype delay_information: list[~azure.mgmt.databox.models.JobDelayDetails]
"""
_validation = {
@@ -4207,6 +4472,7 @@ class JobStages(_serialization.Model):
"stage_status": {"readonly": True},
"stage_time": {"readonly": True},
"job_stage_details": {"readonly": True},
+ "delay_information": {"readonly": True},
}
_attribute_map = {
@@ -4215,6 +4481,7 @@ class JobStages(_serialization.Model):
"stage_status": {"key": "stageStatus", "type": "str"},
"stage_time": {"key": "stageTime", "type": "iso-8601"},
"job_stage_details": {"key": "jobStageDetails", "type": "object"},
+ "delay_information": {"key": "delayInformation", "type": "[JobDelayDetails]"},
}
def __init__(self, **kwargs: Any) -> None:
@@ -4225,6 +4492,7 @@ def __init__(self, **kwargs: Any) -> None:
self.stage_status = None
self.stage_time = None
self.job_stage_details = None
+ self.delay_information = None
class KeyEncryptionKey(_serialization.Model):
@@ -4783,6 +5051,11 @@ class PreferencesValidationRequest(ValidationInputRequest):
:ivar device_type: Device type to be used for the job. Required. Known values are: "DataBox",
"DataBoxDisk", "DataBoxHeavy", and "DataBoxCustomerDisk".
:vartype device_type: str or ~azure.mgmt.databox.models.SkuName
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -4794,6 +5067,7 @@ class PreferencesValidationRequest(ValidationInputRequest):
"validation_type": {"key": "validationType", "type": "str"},
"preference": {"key": "preference", "type": "Preferences"},
"device_type": {"key": "deviceType", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
def __init__(
@@ -4801,6 +5075,7 @@ def __init__(
*,
device_type: Union[str, "_models.SkuName"],
preference: Optional["_models.Preferences"] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
**kwargs: Any
) -> None:
"""
@@ -4809,11 +5084,17 @@ def __init__(
:keyword device_type: Device type to be used for the job. Required. Known values are:
"DataBox", "DataBoxDisk", "DataBoxHeavy", and "DataBoxCustomerDisk".
:paramtype device_type: str or ~azure.mgmt.databox.models.SkuName
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.validation_type: str = "ValidatePreferences"
self.preference = preference
self.device_type = device_type
+ self.model = model
class PreferencesValidationResponseProperties(ValidationInputResponse):
@@ -4865,6 +5146,8 @@ class RegionConfigurationRequest(_serialization.Model):
~azure.mgmt.databox.models.TransportAvailabilityRequest
:ivar datacenter_address_request: Request body to get the datacenter address for given sku.
:vartype datacenter_address_request: ~azure.mgmt.databox.models.DatacenterAddressRequest
+ :ivar device_capability_request: Request body to get the device capabilities for a given sku.
+ :vartype device_capability_request: ~azure.mgmt.databox.models.DeviceCapabilityRequest
"""
_attribute_map = {
@@ -4874,6 +5157,7 @@ class RegionConfigurationRequest(_serialization.Model):
"type": "TransportAvailabilityRequest",
},
"datacenter_address_request": {"key": "datacenterAddressRequest", "type": "DatacenterAddressRequest"},
+ "device_capability_request": {"key": "deviceCapabilityRequest", "type": "DeviceCapabilityRequest"},
}
def __init__(
@@ -4882,6 +5166,7 @@ def __init__(
schedule_availability_request: Optional["_models.ScheduleAvailabilityRequest"] = None,
transport_availability_request: Optional["_models.TransportAvailabilityRequest"] = None,
datacenter_address_request: Optional["_models.DatacenterAddressRequest"] = None,
+ device_capability_request: Optional["_models.DeviceCapabilityRequest"] = None,
**kwargs: Any
) -> None:
"""
@@ -4895,11 +5180,15 @@ def __init__(
~azure.mgmt.databox.models.TransportAvailabilityRequest
:keyword datacenter_address_request: Request body to get the datacenter address for given sku.
:paramtype datacenter_address_request: ~azure.mgmt.databox.models.DatacenterAddressRequest
+ :keyword device_capability_request: Request body to get the device capabilities for a given
+ sku.
+ :paramtype device_capability_request: ~azure.mgmt.databox.models.DeviceCapabilityRequest
"""
super().__init__(**kwargs)
self.schedule_availability_request = schedule_availability_request
self.transport_availability_request = transport_availability_request
self.datacenter_address_request = datacenter_address_request
+ self.device_capability_request = device_capability_request
class RegionConfigurationResponse(_serialization.Model):
@@ -4915,12 +5204,15 @@ class RegionConfigurationResponse(_serialization.Model):
~azure.mgmt.databox.models.TransportAvailabilityResponse
:ivar datacenter_address_response: Datacenter address for given sku in a region.
:vartype datacenter_address_response: ~azure.mgmt.databox.models.DatacenterAddressResponse
+ :ivar device_capability_response: Device capabilities available for a given sku in a region.
+ :vartype device_capability_response: ~azure.mgmt.databox.models.DeviceCapabilityResponse
"""
_validation = {
"schedule_availability_response": {"readonly": True},
"transport_availability_response": {"readonly": True},
"datacenter_address_response": {"readonly": True},
+ "device_capability_response": {"readonly": True},
}
_attribute_map = {
@@ -4933,6 +5225,7 @@ class RegionConfigurationResponse(_serialization.Model):
"type": "TransportAvailabilityResponse",
},
"datacenter_address_response": {"key": "datacenterAddressResponse", "type": "DatacenterAddressResponse"},
+ "device_capability_response": {"key": "deviceCapabilityResponse", "type": "DeviceCapabilityResponse"},
}
def __init__(self, **kwargs: Any) -> None:
@@ -4941,6 +5234,7 @@ def __init__(self, **kwargs: Any) -> None:
self.schedule_availability_response = None
self.transport_availability_response = None
self.datacenter_address_response = None
+ self.device_capability_response = None
class ResourceIdentity(_serialization.Model):
@@ -5176,7 +5470,7 @@ def __init__(self, **kwargs: Any) -> None:
self.ready_by_time = None
-class ShippingAddress(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class ShippingAddress(_serialization.Model):
"""Shipping address where customer wishes to receive the device.
All required parameters must be populated in order to send to server.
@@ -5300,6 +5594,11 @@ class Sku(_serialization.Model):
:vartype display_name: str
:ivar family: The sku family.
:vartype family: str
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -5310,6 +5609,7 @@ class Sku(_serialization.Model):
"name": {"key": "name", "type": "str"},
"display_name": {"key": "displayName", "type": "str"},
"family": {"key": "family", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
def __init__(
@@ -5318,6 +5618,7 @@ def __init__(
name: Union[str, "_models.SkuName"],
display_name: Optional[str] = None,
family: Optional[str] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
**kwargs: Any
) -> None:
"""
@@ -5328,11 +5629,17 @@ def __init__(
:paramtype display_name: str
:keyword family: The sku family.
:paramtype family: str
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.name = name
self.display_name = display_name
self.family = family
+ self.model = model
class SkuAvailabilityValidationRequest(ValidationInputRequest):
@@ -5357,6 +5664,11 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest):
https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01.
Required.
:vartype location: str
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -5373,6 +5685,7 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest):
"transfer_type": {"key": "transferType", "type": "str"},
"country": {"key": "country", "type": "str"},
"location": {"key": "location", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
def __init__(
@@ -5382,6 +5695,7 @@ def __init__(
transfer_type: Union[str, "_models.TransferType"],
country: str,
location: str,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
**kwargs: Any
) -> None:
"""
@@ -5398,6 +5712,11 @@ def __init__(
https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01.
Required.
:paramtype location: str
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.validation_type: str = "ValidateSkuAvailability"
@@ -5405,6 +5724,7 @@ def __init__(
self.transfer_type = transfer_type
self.country = country
self.location = location
+ self.model = model
class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): # pylint: disable=name-too-long
@@ -5453,16 +5773,20 @@ class SkuCapacity(_serialization.Model):
:vartype usable: str
:ivar maximum: Maximum capacity in TB.
:vartype maximum: str
+ :ivar individual_sku_usable: Maximum capacity per device in TB.
+ :vartype individual_sku_usable: str
"""
_validation = {
"usable": {"readonly": True},
"maximum": {"readonly": True},
+ "individual_sku_usable": {"readonly": True},
}
_attribute_map = {
"usable": {"key": "usable", "type": "str"},
"maximum": {"key": "maximum", "type": "str"},
+ "individual_sku_usable": {"key": "individualSkuUsable", "type": "str"},
}
def __init__(self, **kwargs: Any) -> None:
@@ -5470,6 +5794,7 @@ def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
self.usable = None
self.maximum = None
+ self.individual_sku_usable = None
class SkuCost(_serialization.Model):
@@ -5975,20 +6300,38 @@ class TransportAvailabilityRequest(_serialization.Model):
:ivar sku_name: Type of the device. Known values are: "DataBox", "DataBoxDisk", "DataBoxHeavy",
and "DataBoxCustomerDisk".
:vartype sku_name: str or ~azure.mgmt.databox.models.SkuName
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_attribute_map = {
"sku_name": {"key": "skuName", "type": "str"},
+ "model": {"key": "model", "type": "str"},
}
- def __init__(self, *, sku_name: Optional[Union[str, "_models.SkuName"]] = None, **kwargs: Any) -> None:
+ def __init__(
+ self,
+ *,
+ sku_name: Optional[Union[str, "_models.SkuName"]] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
+ **kwargs: Any
+ ) -> None:
"""
:keyword sku_name: Type of the device. Known values are: "DataBox", "DataBoxDisk",
"DataBoxHeavy", and "DataBoxCustomerDisk".
:paramtype sku_name: str or ~azure.mgmt.databox.models.SkuName
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.sku_name = sku_name
+ self.model = model
class TransportAvailabilityResponse(_serialization.Model):
@@ -6241,6 +6584,11 @@ class ValidateAddress(ValidationInputRequest):
:vartype device_type: str or ~azure.mgmt.databox.models.SkuName
:ivar transport_preferences: Preferences related to the shipment logistics of the sku.
:vartype transport_preferences: ~azure.mgmt.databox.models.TransportPreferences
+ :ivar model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :vartype model: str or ~azure.mgmt.databox.models.ModelName
"""
_validation = {
@@ -6254,6 +6602,7 @@ class ValidateAddress(ValidationInputRequest):
"shipping_address": {"key": "shippingAddress", "type": "ShippingAddress"},
"device_type": {"key": "deviceType", "type": "str"},
"transport_preferences": {"key": "transportPreferences", "type": "TransportPreferences"},
+ "model": {"key": "model", "type": "str"},
}
def __init__(
@@ -6262,6 +6611,7 @@ def __init__(
shipping_address: "_models.ShippingAddress",
device_type: Union[str, "_models.SkuName"],
transport_preferences: Optional["_models.TransportPreferences"] = None,
+ model: Optional[Union[str, "_models.ModelName"]] = None,
**kwargs: Any
) -> None:
"""
@@ -6272,12 +6622,18 @@ def __init__(
:paramtype device_type: str or ~azure.mgmt.databox.models.SkuName
:keyword transport_preferences: Preferences related to the shipment logistics of the sku.
:paramtype transport_preferences: ~azure.mgmt.databox.models.TransportPreferences
+ :keyword model: The customer friendly name of the combination of version and capacity of the
+ device. This field is necessary only at the time of ordering the newer generation device i.e.
+ AzureDataBox120 and AzureDataBox525 as of Feb/2025. Known values are: "DataBox", "DataBoxDisk",
+ "DataBoxHeavy", "DataBoxCustomerDisk", "AzureDataBox120", and "AzureDataBox525".
+ :paramtype model: str or ~azure.mgmt.databox.models.ModelName
"""
super().__init__(**kwargs)
self.validation_type: str = "ValidateAddress"
self.shipping_address = shipping_address
self.device_type = device_type
self.transport_preferences = transport_preferences
+ self.model = model
class ValidationResponse(_serialization.Model):
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/__init__.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/__init__.py
index d44a1996902a..3e73db476be2 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/__init__.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/__init__.py
@@ -5,14 +5,20 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import Operations
-from ._jobs_operations import JobsOperations
-from ._data_box_management_client_operations import DataBoxManagementClientOperationsMixin
-from ._service_operations import ServiceOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import Operations # type: ignore
+from ._jobs_operations import JobsOperations # type: ignore
+from ._data_box_management_client_operations import DataBoxManagementClientOperationsMixin # type: ignore
+from ._service_operations import ServiceOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -21,5 +27,5 @@
"DataBoxManagementClientOperationsMixin",
"ServiceOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_data_box_management_client_operations.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_data_box_management_client_operations.py
index 8dfa7e7a6afe..98c72599fa32 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_data_box_management_client_operations.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_data_box_management_client_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +7,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -31,7 +30,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -43,7 +42,7 @@ def build_mitigate_request(job_name: str, resource_group_name: str, subscription
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -74,7 +73,7 @@ def build_mitigate_request(job_name: str, resource_group_name: str, subscription
class DataBoxManagementClientOperationsMixin(DataBoxManagementClientMixinABC):
@overload
- def mitigate( # pylint: disable=inconsistent-return-statements
+ def mitigate(
self,
job_name: str,
resource_group_name: str,
@@ -102,7 +101,7 @@ def mitigate( # pylint: disable=inconsistent-return-statements
"""
@overload
- def mitigate( # pylint: disable=inconsistent-return-statements
+ def mitigate(
self,
job_name: str,
resource_group_name: str,
@@ -152,7 +151,7 @@ def mitigate( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_jobs_operations.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_jobs_operations.py
index 8d0f50677ed0..6c0b997fe544 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_jobs_operations.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_jobs_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -36,7 +36,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -48,7 +48,7 @@ def build_list_request(subscription_id: str, *, skip_token: Optional[str] = None
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -76,7 +76,7 @@ def build_mark_devices_shipped_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -110,7 +110,7 @@ def build_list_by_resource_group_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -142,7 +142,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -173,7 +173,7 @@ def build_create_request(resource_group_name: str, job_name: str, subscription_i
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -205,7 +205,7 @@ def build_delete_request(resource_group_name: str, job_name: str, subscription_i
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -236,7 +236,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -272,7 +272,7 @@ def build_book_shipment_pick_up_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -304,7 +304,7 @@ def build_cancel_request(resource_group_name: str, job_name: str, subscription_i
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -338,7 +338,7 @@ def build_list_credentials_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -399,7 +399,7 @@ def list(self, skip_token: Optional[str] = None, **kwargs: Any) -> Iterable["_mo
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.JobResourceList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -462,7 +462,7 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
@overload
- def mark_devices_shipped( # pylint: disable=inconsistent-return-statements
+ def mark_devices_shipped(
self,
job_name: str,
resource_group_name: str,
@@ -490,7 +490,7 @@ def mark_devices_shipped( # pylint: disable=inconsistent-return-statements
"""
@overload
- def mark_devices_shipped( # pylint: disable=inconsistent-return-statements
+ def mark_devices_shipped(
self,
job_name: str,
resource_group_name: str,
@@ -541,7 +541,7 @@ def mark_devices_shipped( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -613,7 +613,7 @@ def list_by_resource_group(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.JobResourceList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -695,7 +695,7 @@ def get(
:rtype: ~azure.mgmt.databox.models.JobResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -746,7 +746,7 @@ def _create_initial(
job_resource: Union[_models.JobResource, IO[bytes]],
**kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -939,7 +939,7 @@ def get_long_running_output(pipeline_response):
)
def _delete_initial(self, resource_group_name: str, job_name: str, **kwargs: Any) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1053,7 +1053,7 @@ def _update_initial(
if_match: Optional[str] = None,
**kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1340,7 +1340,7 @@ def book_shipment_pick_up(
:rtype: ~azure.mgmt.databox.models.ShipmentPickUpResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1396,7 +1396,7 @@ def book_shipment_pick_up(
return deserialized # type: ignore
@overload
- def cancel( # pylint: disable=inconsistent-return-statements
+ def cancel(
self,
resource_group_name: str,
job_name: str,
@@ -1424,7 +1424,7 @@ def cancel( # pylint: disable=inconsistent-return-statements
"""
@overload
- def cancel( # pylint: disable=inconsistent-return-statements
+ def cancel(
self,
resource_group_name: str,
job_name: str,
@@ -1474,7 +1474,7 @@ def cancel( # pylint: disable=inconsistent-return-statements
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1548,7 +1548,7 @@ def list_credentials(
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.UnencryptedCredentialsList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_operations.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_operations.py
index 119d5b5ef343..37ea02a3ce73 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_operations.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,7 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
-from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar
+from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
from azure.core.exceptions import (
@@ -31,7 +30,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -43,7 +42,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -91,7 +90,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]:
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationList] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_service_operations.py b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_service_operations.py
index 039b6d40b17d..9e1824b973ad 100644
--- a/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_service_operations.py
+++ b/sdk/databox/azure-mgmt-databox/azure/mgmt/databox/operations/_service_operations.py
@@ -1,4 +1,4 @@
-# pylint: disable=too-many-lines,too-many-statements
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -8,7 +8,7 @@
# --------------------------------------------------------------------------
from io import IOBase
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, overload
+from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
@@ -32,7 +32,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -46,7 +46,7 @@ def build_list_available_skus_by_resource_group_request( # pylint: disable=name
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -78,7 +78,7 @@ def build_validate_address_request(location: str, subscription_id: str, **kwargs
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -111,7 +111,7 @@ def build_validate_inputs_by_resource_group_request( # pylint: disable=name-too
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -143,7 +143,7 @@ def build_validate_inputs_request(location: str, subscription_id: str, **kwargs:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -174,7 +174,7 @@ def build_region_configuration_request(location: str, subscription_id: str, **kw
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -207,7 +207,7 @@ def build_region_configuration_by_resource_group_request( # pylint: disable=nam
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-12-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-02-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -337,7 +337,7 @@ def list_available_skus_by_resource_group(
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.AvailableSkusResult] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -469,7 +469,7 @@ def validate_address(
:rtype: ~azure.mgmt.databox.models.AddressValidationOutput
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -596,7 +596,7 @@ def validate_inputs_by_resource_group(
:rtype: ~azure.mgmt.databox.models.ValidationResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -707,7 +707,7 @@ def validate_inputs(
:rtype: ~azure.mgmt.databox.models.ValidationResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -831,7 +831,7 @@ def region_configuration(
:rtype: ~azure.mgmt.databox.models.RegionConfigurationResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -965,7 +965,7 @@ def region_configuration_by_resource_group(
:rtype: ~azure.mgmt.databox.models.RegionConfigurationResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/available_skus_post.py b/sdk/databox/azure-mgmt-databox/generated_samples/available_skus_post.py
index 603a063ad7e7..501f22211e34 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/available_skus_post.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/available_skus_post.py
@@ -39,6 +39,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/AvailableSkusPost.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/AvailableSkusPost.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/book_shipment_pickup_post.py b/sdk/databox/azure-mgmt-databox/generated_samples/book_shipment_pickup_post.py
index 94f3263df5e8..9f07c04b815d 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/book_shipment_pickup_post.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/book_shipment_pickup_post.py
@@ -42,6 +42,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/BookShipmentPickupPost.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/BookShipmentPickupPost.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/job_mitigate.py b/sdk/databox/azure-mgmt-databox/generated_samples/job_mitigate.py
index 2481f3b845f6..e64ebfd12f25 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/job_mitigate.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/job_mitigate.py
@@ -39,6 +39,6 @@ def main():
)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobMitigate.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobMitigate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_cancel_post.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_cancel_post.py
index af32999621c0..aca2871898cf 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_cancel_post.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_cancel_post.py
@@ -37,6 +37,6 @@ def main():
)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsCancelPost.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsCancelPost.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create.py
index ea4df71b1660..08da28269e0b 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create.py
@@ -65,12 +65,12 @@ def main():
},
"transferType": "ImportToAzure",
},
- "sku": {"name": "DataBox"},
+ "sku": {"model": "DataBox", "name": "DataBox"},
},
).result()
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsCreate.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsCreate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_device_password.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_device_password.py
index 7e07d91a6b18..5083d6b96306 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_device_password.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_device_password.py
@@ -67,12 +67,12 @@ def main():
},
"transferType": "ImportToAzure",
},
- "sku": {"name": "DataBox"},
+ "sku": {"model": "DataBox", "name": "DataBox"},
},
).result()
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsCreateDevicePassword.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsCreateDevicePassword.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_double_encryption.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_double_encryption.py
index 0f202d62efef..be062a2fa2b1 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_double_encryption.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_double_encryption.py
@@ -66,12 +66,12 @@ def main():
},
"transferType": "ImportToAzure",
},
- "sku": {"name": "DataBox"},
+ "sku": {"model": "DataBox", "name": "DataBox"},
},
).result()
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsCreateDoubleEncryption.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsCreateDoubleEncryption.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_export.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_export.py
index ffbb994db6df..0f350476496b 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_export.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_export.py
@@ -75,12 +75,12 @@ def main():
},
"transferType": "ExportFromAzure",
},
- "sku": {"name": "DataBox"},
+ "sku": {"model": "DataBox", "name": "DataBox"},
},
).result()
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsCreateExport.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsCreateExport.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_with_user_assigned_identity.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_with_user_assigned_identity.py
index 890906d3bc05..4f4b2129491b 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_with_user_assigned_identity.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_create_with_user_assigned_identity.py
@@ -71,12 +71,12 @@ def main():
},
"transferType": "ImportToAzure",
},
- "sku": {"name": "DataBox"},
+ "sku": {"model": "DataBox", "name": "DataBox"},
},
).result()
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsCreateWithUserAssignedIdentity.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsCreateWithUserAssignedIdentity.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_delete.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_delete.py
index 66d014809657..21d73389f8f4 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_delete.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_delete.py
@@ -36,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsDelete.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsDelete.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get.py
index 15d13fb72ef7..9f0c6dbae06e 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsGet.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsGet.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_cmk.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_cmk.py
index 69eb2562fe2a..9684c241f7bf 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_cmk.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_cmk.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsGetCmk.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsGetCmk.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_copy_stuck.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_copy_stuck.py
index ba3c9a554b73..f026243f5ed4 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_copy_stuck.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_copy_stuck.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsGetCopyStuck.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsGetCopyStuck.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_export.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_export.py
index dc0bedeb620f..8303c9c69b38 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_export.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_export.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsGetExport.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsGetExport.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_waiting_for_action.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_waiting_for_action.py
index 074de3797c30..a8d05f85d7a6 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_waiting_for_action.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_get_waiting_for_action.py
@@ -37,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsGetWaitingForAction.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsGetWaitingForAction.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list.py
index 8b607c64d12e..66ef6cb2f17d 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsList.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsList.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list_by_resource_group.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list_by_resource_group.py
index e18dd0bf3c6b..250d500f1898 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list_by_resource_group.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list_by_resource_group.py
@@ -37,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsListByResourceGroup.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsListByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list_credentials.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list_credentials.py
index 207d7af5d4b3..a6a5a743c571 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list_credentials.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_list_credentials.py
@@ -38,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsListCredentials.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsListCredentials.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch.py
index 64eca807313f..3d7415c2fe85 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch.py
@@ -59,6 +59,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsPatch.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsPatch.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch_cmk.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch_cmk.py
index 68679e6bdda1..e94335e8dafb 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch_cmk.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch_cmk.py
@@ -48,6 +48,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsPatchCmk.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsPatchCmk.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch_system_assigned_to_user_assigned.py b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch_system_assigned_to_user_assigned.py
index c9fd485711e5..bbfc30e9f00c 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch_system_assigned_to_user_assigned.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/jobs_patch_system_assigned_to_user_assigned.py
@@ -60,6 +60,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/JobsPatchSystemAssignedToUserAssigned.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/JobsPatchSystemAssignedToUserAssigned.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/mark_devices_shipped.py b/sdk/databox/azure-mgmt-databox/generated_samples/mark_devices_shipped.py
index 7ed79bef7167..709112682fca 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/mark_devices_shipped.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/mark_devices_shipped.py
@@ -39,6 +39,6 @@ def main():
)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/MarkDevicesShipped.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/MarkDevicesShipped.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/operations_get.py b/sdk/databox/azure-mgmt-databox/generated_samples/operations_get.py
index 8c27eea6eba8..ac96c488caf8 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/operations_get.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/operations_get.py
@@ -35,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/OperationsGet.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/OperationsGet.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/region_configuration.py b/sdk/databox/azure-mgmt-databox/generated_samples/region_configuration.py
index 6443cd61d27d..f0ff6a4d167e 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/region_configuration.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/region_configuration.py
@@ -33,12 +33,13 @@ def main():
response = client.service.region_configuration(
location="westus",
region_configuration_request={
- "scheduleAvailabilityRequest": {"skuName": "DataBox", "storageLocation": "westus"}
+ "deviceCapabilityRequest": {"model": "DataBoxDisk", "skuName": "DataBoxDisk"},
+ "scheduleAvailabilityRequest": {"model": "DataBox", "skuName": "DataBox", "storageLocation": "westus"},
},
)
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/RegionConfiguration.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/RegionConfiguration.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/region_configuration_by_resource_group.py b/sdk/databox/azure-mgmt-databox/generated_samples/region_configuration_by_resource_group.py
index ef001951f7de..4322a5715157 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/region_configuration_by_resource_group.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/region_configuration_by_resource_group.py
@@ -34,12 +34,13 @@ def main():
resource_group_name="YourResourceGroupName",
location="westus",
region_configuration_request={
- "scheduleAvailabilityRequest": {"skuName": "DataBox", "storageLocation": "westus"}
+ "deviceCapabilityRequest": {"model": "DataBoxDisk", "skuName": "DataBoxDisk"},
+ "scheduleAvailabilityRequest": {"model": "DataBox", "skuName": "DataBox", "storageLocation": "westus"},
},
)
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/RegionConfigurationByResourceGroup.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/RegionConfigurationByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/validate_address_post.py b/sdk/databox/azure-mgmt-databox/generated_samples/validate_address_post.py
index b1406510be5f..7d3a034b7c0d 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/validate_address_post.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/validate_address_post.py
@@ -34,6 +34,7 @@ def main():
location="westus",
validate_address={
"deviceType": "DataBox",
+ "model": "DataBox",
"shippingAddress": {
"addressType": "Commercial",
"city": "XXXX XXXX",
@@ -50,6 +51,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/ValidateAddressPost.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/ValidateAddressPost.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/validate_inputs.py b/sdk/databox/azure-mgmt-databox/generated_samples/validate_inputs.py
index 71b02835229c..013daaf6d5f3 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/validate_inputs.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/validate_inputs.py
@@ -44,11 +44,13 @@ def main():
}
],
"deviceType": "DataBox",
+ "model": "DataBox",
"transferType": "ImportToAzure",
"validationType": "ValidateDataTransferDetails",
},
{
"deviceType": "DataBox",
+ "model": "DataBox",
"shippingAddress": {
"addressType": "Commercial",
"city": "XXXX XXXX",
@@ -67,12 +69,14 @@ def main():
"country": "XX",
"deviceType": "DataBox",
"location": "westus",
+ "model": "DataBox",
"transferType": "ImportToAzure",
"validationType": "ValidateSkuAvailability",
},
- {"deviceType": "DataBox", "validationType": "ValidateCreateOrderLimit"},
+ {"deviceType": "DataBox", "model": "DataBox", "validationType": "ValidateCreateOrderLimit"},
{
"deviceType": "DataBox",
+ "model": "DataBox",
"preference": {"transportPreferences": {"preferredShipmentType": "MicrosoftManaged"}},
"validationType": "ValidatePreferences",
},
@@ -83,6 +87,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/ValidateInputs.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/ValidateInputs.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_samples/validate_inputs_by_resource_group.py b/sdk/databox/azure-mgmt-databox/generated_samples/validate_inputs_by_resource_group.py
index 9219c46fc7ea..b2d8f6fa88ca 100644
--- a/sdk/databox/azure-mgmt-databox/generated_samples/validate_inputs_by_resource_group.py
+++ b/sdk/databox/azure-mgmt-databox/generated_samples/validate_inputs_by_resource_group.py
@@ -45,11 +45,13 @@ def main():
}
],
"deviceType": "DataBox",
+ "model": "DataBox",
"transferType": "ImportToAzure",
"validationType": "ValidateDataTransferDetails",
},
{
"deviceType": "DataBox",
+ "model": "DataBox",
"shippingAddress": {
"addressType": "Commercial",
"city": "XXXX XXXX",
@@ -68,12 +70,14 @@ def main():
"country": "XX",
"deviceType": "DataBox",
"location": "westus",
+ "model": "DataBox",
"transferType": "ImportToAzure",
"validationType": "ValidateSkuAvailability",
},
- {"deviceType": "DataBox", "validationType": "ValidateCreateOrderLimit"},
+ {"deviceType": "DataBox", "model": "DataBox", "validationType": "ValidateCreateOrderLimit"},
{
"deviceType": "DataBox",
+ "model": "DataBox",
"preference": {"transportPreferences": {"preferredShipmentType": "MicrosoftManaged"}},
"validationType": "ValidatePreferences",
},
@@ -84,6 +88,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2022-12-01/examples/ValidateInputsByResourceGroup.json
+# x-ms-original-file: specification/databox/resource-manager/Microsoft.DataBox/stable/2025-02-01/examples/ValidateInputsByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/conftest.py b/sdk/databox/azure-mgmt-databox/generated_tests/conftest.py
index a72d01c731af..01886a327895 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/conftest.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/conftest.py
@@ -18,7 +18,7 @@
load_dotenv()
-# aovid record sensitive identity information in recordings
+# For security, please avoid recording sensitive identity information in recordings
@pytest.fixture(scope="session", autouse=True)
def add_sanitizers(test_proxy):
databoxmanagement_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000")
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management.py b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management.py
index 5702267e2f51..cd535f1a026a 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management.py
@@ -25,7 +25,7 @@ def test_mitigate(self, resource_group):
job_name="str",
resource_group_name=resource_group.name,
mitigate_job_request={"customerResolutionCode": "str", "serialNumberCustomerResolutionMap": {"str": "str"}},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_async.py b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_async.py
index 7b839c679419..ba5a1d2d21e3 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_async.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_async.py
@@ -26,7 +26,7 @@ async def test_mitigate(self, resource_group):
job_name="str",
resource_group_name=resource_group.name,
mitigate_job_request={"customerResolutionCode": "str", "serialNumberCustomerResolutionMap": {"str": "str"}},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_jobs_operations.py b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_jobs_operations.py
index 7a1c0f820f93..3b1b4b08647c 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_jobs_operations.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_jobs_operations.py
@@ -20,9 +20,9 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list(self, resource_group):
+ def test_jobs_list(self, resource_group):
response = self.client.jobs.list(
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r for r in response]
# please add some check logic here by yourself
@@ -30,12 +30,12 @@ def test_list(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_mark_devices_shipped(self, resource_group):
+ def test_jobs_mark_devices_shipped(self, resource_group):
response = self.client.jobs.mark_devices_shipped(
job_name="str",
resource_group_name=resource_group.name,
mark_devices_shipped_request={"deliverToDcPackageDetails": {"carrierName": "str", "trackingId": "str"}},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -43,10 +43,10 @@ def test_mark_devices_shipped(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_by_resource_group(self, resource_group):
+ def test_jobs_list_by_resource_group(self, resource_group):
response = self.client.jobs.list_by_resource_group(
resource_group_name=resource_group.name,
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r for r in response]
# please add some check logic here by yourself
@@ -54,11 +54,11 @@ def test_list_by_resource_group(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_get(self, resource_group):
+ def test_jobs_get(self, resource_group):
response = self.client.jobs.get(
resource_group_name=resource_group.name,
job_name="str",
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -66,15 +66,17 @@ def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_create(self, resource_group):
+ def test_jobs_begin_create(self, resource_group):
response = self.client.jobs.begin_create(
resource_group_name=resource_group.name,
job_name="str",
job_resource={
"location": "str",
- "sku": {"name": "str", "displayName": "str", "family": "str"},
+ "sku": {"name": "str", "displayName": "str", "family": "str", "model": "str"},
"transferType": "str",
+ "allDevicesLost": bool,
"cancellationReason": "str",
+ "delayedStage": "str",
"deliveryInfo": {"scheduledDateTime": "2020-02-20 00:00:00"},
"deliveryType": "NonScheduled",
"details": "job_details",
@@ -113,7 +115,7 @@ def test_begin_create(self, resource_group):
"tags": {"str": "str"},
"type": "str",
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
).result() # call '.result()' to poll until service return final result
# please add some check logic here by yourself
@@ -121,11 +123,11 @@ def test_begin_create(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_delete(self, resource_group):
+ def test_jobs_begin_delete(self, resource_group):
response = self.client.jobs.begin_delete(
resource_group_name=resource_group.name,
job_name="str",
- api_version="2022-12-01",
+ api_version="2025-02-01",
).result() # call '.result()' to poll until service return final result
# please add some check logic here by yourself
@@ -133,7 +135,7 @@ def test_begin_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_begin_update(self, resource_group):
+ def test_jobs_begin_update(self, resource_group):
response = self.client.jobs.begin_update(
resource_group_name=resource_group.name,
job_name="str",
@@ -211,7 +213,7 @@ def test_begin_update(self, resource_group):
},
"tags": {"str": "str"},
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
).result() # call '.result()' to poll until service return final result
# please add some check logic here by yourself
@@ -219,7 +221,7 @@ def test_begin_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_book_shipment_pick_up(self, resource_group):
+ def test_jobs_book_shipment_pick_up(self, resource_group):
response = self.client.jobs.book_shipment_pick_up(
resource_group_name=resource_group.name,
job_name="str",
@@ -228,7 +230,7 @@ def test_book_shipment_pick_up(self, resource_group):
"shipmentLocation": "str",
"startTime": "2020-02-20 00:00:00",
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -236,12 +238,12 @@ def test_book_shipment_pick_up(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_cancel(self, resource_group):
+ def test_jobs_cancel(self, resource_group):
response = self.client.jobs.cancel(
resource_group_name=resource_group.name,
job_name="str",
cancellation_reason={"reason": "str"},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -249,11 +251,11 @@ def test_cancel(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_credentials(self, resource_group):
+ def test_jobs_list_credentials(self, resource_group):
response = self.client.jobs.list_credentials(
resource_group_name=resource_group.name,
job_name="str",
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r for r in response]
# please add some check logic here by yourself
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_jobs_operations_async.py b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_jobs_operations_async.py
index 415b0c92e722..be9ac8f08bc6 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_jobs_operations_async.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_jobs_operations_async.py
@@ -21,9 +21,9 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list(self, resource_group):
+ async def test_jobs_list(self, resource_group):
response = self.client.jobs.list(
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r async for r in response]
# please add some check logic here by yourself
@@ -31,12 +31,12 @@ async def test_list(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_mark_devices_shipped(self, resource_group):
+ async def test_jobs_mark_devices_shipped(self, resource_group):
response = await self.client.jobs.mark_devices_shipped(
job_name="str",
resource_group_name=resource_group.name,
mark_devices_shipped_request={"deliverToDcPackageDetails": {"carrierName": "str", "trackingId": "str"}},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -44,10 +44,10 @@ async def test_mark_devices_shipped(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_by_resource_group(self, resource_group):
+ async def test_jobs_list_by_resource_group(self, resource_group):
response = self.client.jobs.list_by_resource_group(
resource_group_name=resource_group.name,
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r async for r in response]
# please add some check logic here by yourself
@@ -55,11 +55,11 @@ async def test_list_by_resource_group(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_get(self, resource_group):
+ async def test_jobs_get(self, resource_group):
response = await self.client.jobs.get(
resource_group_name=resource_group.name,
job_name="str",
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -67,16 +67,18 @@ async def test_get(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_create(self, resource_group):
+ async def test_jobs_begin_create(self, resource_group):
response = await (
await self.client.jobs.begin_create(
resource_group_name=resource_group.name,
job_name="str",
job_resource={
"location": "str",
- "sku": {"name": "str", "displayName": "str", "family": "str"},
+ "sku": {"name": "str", "displayName": "str", "family": "str", "model": "str"},
"transferType": "str",
+ "allDevicesLost": bool,
"cancellationReason": "str",
+ "delayedStage": "str",
"deliveryInfo": {"scheduledDateTime": "2020-02-20 00:00:00"},
"deliveryType": "NonScheduled",
"details": "job_details",
@@ -115,7 +117,7 @@ async def test_begin_create(self, resource_group):
"tags": {"str": "str"},
"type": "str",
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
).result() # call '.result()' to poll until service return final result
@@ -124,12 +126,12 @@ async def test_begin_create(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_delete(self, resource_group):
+ async def test_jobs_begin_delete(self, resource_group):
response = await (
await self.client.jobs.begin_delete(
resource_group_name=resource_group.name,
job_name="str",
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
).result() # call '.result()' to poll until service return final result
@@ -138,7 +140,7 @@ async def test_begin_delete(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_begin_update(self, resource_group):
+ async def test_jobs_begin_update(self, resource_group):
response = await (
await self.client.jobs.begin_update(
resource_group_name=resource_group.name,
@@ -217,7 +219,7 @@ async def test_begin_update(self, resource_group):
},
"tags": {"str": "str"},
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
).result() # call '.result()' to poll until service return final result
@@ -226,7 +228,7 @@ async def test_begin_update(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_book_shipment_pick_up(self, resource_group):
+ async def test_jobs_book_shipment_pick_up(self, resource_group):
response = await self.client.jobs.book_shipment_pick_up(
resource_group_name=resource_group.name,
job_name="str",
@@ -235,7 +237,7 @@ async def test_book_shipment_pick_up(self, resource_group):
"shipmentLocation": "str",
"startTime": "2020-02-20 00:00:00",
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -243,12 +245,12 @@ async def test_book_shipment_pick_up(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_cancel(self, resource_group):
+ async def test_jobs_cancel(self, resource_group):
response = await self.client.jobs.cancel(
resource_group_name=resource_group.name,
job_name="str",
cancellation_reason={"reason": "str"},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -256,11 +258,11 @@ async def test_cancel(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_credentials(self, resource_group):
+ async def test_jobs_list_credentials(self, resource_group):
response = self.client.jobs.list_credentials(
resource_group_name=resource_group.name,
job_name="str",
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r async for r in response]
# please add some check logic here by yourself
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_operations.py b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_operations.py
index 208de6b86816..c15360974786 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_operations.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_operations.py
@@ -20,9 +20,9 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list(self, resource_group):
+ def test_operations_list(self, resource_group):
response = self.client.operations.list(
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r for r in response]
# please add some check logic here by yourself
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_operations_async.py b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_operations_async.py
index 16138bb49fd7..bf1b8fd87127 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_operations_async.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_operations_async.py
@@ -21,9 +21,9 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list(self, resource_group):
+ async def test_operations_list(self, resource_group):
response = self.client.operations.list(
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r async for r in response]
# please add some check logic here by yourself
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_service_operations.py b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_service_operations.py
index 2e1340cf87fa..45498cddde95 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_service_operations.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_service_operations.py
@@ -20,12 +20,12 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_list_available_skus_by_resource_group(self, resource_group):
+ def test_service_list_available_skus_by_resource_group(self, resource_group):
response = self.client.service.list_available_skus_by_resource_group(
resource_group_name=resource_group.name,
location="str",
available_sku_request={"country": "str", "location": "str", "transferType": "str", "skuNames": ["str"]},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r for r in response]
# please add some check logic here by yourself
@@ -33,7 +33,7 @@ def test_list_available_skus_by_resource_group(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_validate_address(self, resource_group):
+ def test_service_validate_address(self, resource_group):
response = self.client.service.validate_address(
location="str",
validate_address={
@@ -53,9 +53,10 @@ def test_validate_address(self, resource_group):
"zipExtendedCode": "str",
},
"validationType": "ValidateAddress",
+ "model": "str",
"transportPreferences": {"preferredShipmentType": "str", "isUpdated": bool},
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -63,7 +64,7 @@ def test_validate_address(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_validate_inputs_by_resource_group(self, resource_group):
+ def test_service_validate_inputs_by_resource_group(self, resource_group):
response = self.client.service.validate_inputs_by_resource_group(
resource_group_name=resource_group.name,
location="str",
@@ -71,7 +72,7 @@ def test_validate_inputs_by_resource_group(self, resource_group):
"individualRequestDetails": ["validation_input_request"],
"validationCategory": "JobCreationValidation",
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -79,14 +80,14 @@ def test_validate_inputs_by_resource_group(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_validate_inputs(self, resource_group):
+ def test_service_validate_inputs(self, resource_group):
response = self.client.service.validate_inputs(
location="str",
validation_request={
"individualRequestDetails": ["validation_input_request"],
"validationCategory": "JobCreationValidation",
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -94,15 +95,16 @@ def test_validate_inputs(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_region_configuration(self, resource_group):
+ def test_service_region_configuration(self, resource_group):
response = self.client.service.region_configuration(
location="str",
region_configuration_request={
- "datacenterAddressRequest": {"skuName": "str", "storageLocation": "str"},
+ "datacenterAddressRequest": {"skuName": "str", "storageLocation": "str", "model": "str"},
+ "deviceCapabilityRequest": {"model": "str", "skuName": "str"},
"scheduleAvailabilityRequest": "schedule_availability_request",
- "transportAvailabilityRequest": {"skuName": "str"},
+ "transportAvailabilityRequest": {"model": "str", "skuName": "str"},
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -110,16 +112,17 @@ def test_region_configuration(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy
- def test_region_configuration_by_resource_group(self, resource_group):
+ def test_service_region_configuration_by_resource_group(self, resource_group):
response = self.client.service.region_configuration_by_resource_group(
resource_group_name=resource_group.name,
location="str",
region_configuration_request={
- "datacenterAddressRequest": {"skuName": "str", "storageLocation": "str"},
+ "datacenterAddressRequest": {"skuName": "str", "storageLocation": "str", "model": "str"},
+ "deviceCapabilityRequest": {"model": "str", "skuName": "str"},
"scheduleAvailabilityRequest": "schedule_availability_request",
- "transportAvailabilityRequest": {"skuName": "str"},
+ "transportAvailabilityRequest": {"model": "str", "skuName": "str"},
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
diff --git a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_service_operations_async.py b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_service_operations_async.py
index 16d05447ea9d..2cc884be54f9 100644
--- a/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_service_operations_async.py
+++ b/sdk/databox/azure-mgmt-databox/generated_tests/test_data_box_management_service_operations_async.py
@@ -21,12 +21,12 @@ def setup_method(self, method):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_list_available_skus_by_resource_group(self, resource_group):
+ async def test_service_list_available_skus_by_resource_group(self, resource_group):
response = self.client.service.list_available_skus_by_resource_group(
resource_group_name=resource_group.name,
location="str",
available_sku_request={"country": "str", "location": "str", "transferType": "str", "skuNames": ["str"]},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
result = [r async for r in response]
# please add some check logic here by yourself
@@ -34,7 +34,7 @@ async def test_list_available_skus_by_resource_group(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_validate_address(self, resource_group):
+ async def test_service_validate_address(self, resource_group):
response = await self.client.service.validate_address(
location="str",
validate_address={
@@ -54,9 +54,10 @@ async def test_validate_address(self, resource_group):
"zipExtendedCode": "str",
},
"validationType": "ValidateAddress",
+ "model": "str",
"transportPreferences": {"preferredShipmentType": "str", "isUpdated": bool},
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -64,7 +65,7 @@ async def test_validate_address(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_validate_inputs_by_resource_group(self, resource_group):
+ async def test_service_validate_inputs_by_resource_group(self, resource_group):
response = await self.client.service.validate_inputs_by_resource_group(
resource_group_name=resource_group.name,
location="str",
@@ -72,7 +73,7 @@ async def test_validate_inputs_by_resource_group(self, resource_group):
"individualRequestDetails": ["validation_input_request"],
"validationCategory": "JobCreationValidation",
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -80,14 +81,14 @@ async def test_validate_inputs_by_resource_group(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_validate_inputs(self, resource_group):
+ async def test_service_validate_inputs(self, resource_group):
response = await self.client.service.validate_inputs(
location="str",
validation_request={
"individualRequestDetails": ["validation_input_request"],
"validationCategory": "JobCreationValidation",
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -95,15 +96,16 @@ async def test_validate_inputs(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_region_configuration(self, resource_group):
+ async def test_service_region_configuration(self, resource_group):
response = await self.client.service.region_configuration(
location="str",
region_configuration_request={
- "datacenterAddressRequest": {"skuName": "str", "storageLocation": "str"},
+ "datacenterAddressRequest": {"skuName": "str", "storageLocation": "str", "model": "str"},
+ "deviceCapabilityRequest": {"model": "str", "skuName": "str"},
"scheduleAvailabilityRequest": "schedule_availability_request",
- "transportAvailabilityRequest": {"skuName": "str"},
+ "transportAvailabilityRequest": {"model": "str", "skuName": "str"},
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
@@ -111,16 +113,17 @@ async def test_region_configuration(self, resource_group):
@RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
@recorded_by_proxy_async
- async def test_region_configuration_by_resource_group(self, resource_group):
+ async def test_service_region_configuration_by_resource_group(self, resource_group):
response = await self.client.service.region_configuration_by_resource_group(
resource_group_name=resource_group.name,
location="str",
region_configuration_request={
- "datacenterAddressRequest": {"skuName": "str", "storageLocation": "str"},
+ "datacenterAddressRequest": {"skuName": "str", "storageLocation": "str", "model": "str"},
+ "deviceCapabilityRequest": {"model": "str", "skuName": "str"},
"scheduleAvailabilityRequest": "schedule_availability_request",
- "transportAvailabilityRequest": {"skuName": "str"},
+ "transportAvailabilityRequest": {"model": "str", "skuName": "str"},
},
- api_version="2022-12-01",
+ api_version="2025-02-01",
)
# please add some check logic here by yourself
diff --git a/sdk/databox/azure-mgmt-databox/setup.py b/sdk/databox/azure-mgmt-databox/setup.py
index 5b39dfd469f1..a501a52acfdd 100644
--- a/sdk/databox/azure-mgmt-databox/setup.py
+++ b/sdk/databox/azure-mgmt-databox/setup.py
@@ -22,11 +22,9 @@
# Version extraction inspired from 'requests'
with open(
- (
- os.path.join(package_folder_path, "version.py")
- if os.path.exists(os.path.join(package_folder_path, "version.py"))
- else os.path.join(package_folder_path, "_version.py")
- ),
+ os.path.join(package_folder_path, "version.py")
+ if os.path.exists(os.path.join(package_folder_path, "version.py"))
+ else os.path.join(package_folder_path, "_version.py"),
"r",
) as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1)