diff --git a/sdk/databricks/azure-mgmt-databricks/CHANGELOG.md b/sdk/databricks/azure-mgmt-databricks/CHANGELOG.md
index 9ae1b6852074..a17a49914c07 100644
--- a/sdk/databricks/azure-mgmt-databricks/CHANGELOG.md
+++ b/sdk/databricks/azure-mgmt-databricks/CHANGELOG.md
@@ -1,5 +1,40 @@
# Release History
+## 3.0.0b1 (2025-03-31)
+
+### Features Added
+
+ - Model `AccessConnectorProperties` added property `refered_by`
+ - Model `Workspace` added property `enhanced_security_compliance`
+ - Model `Workspace` added property `default_catalog`
+ - Model `Workspace` added property `is_uc_enabled`
+ - Model `Workspace` added property `access_connector`
+ - Model `Workspace` added property `default_storage_firewall`
+ - Added model `AutomaticClusterUpdateDefinition`
+ - Added enum `AutomaticClusterUpdateValue`
+ - Added model `ComplianceSecurityProfileDefinition`
+ - Added enum `ComplianceSecurityProfileValue`
+ - Added enum `ComplianceStandard`
+ - Added model `DefaultCatalogProperties`
+ - Added enum `DefaultStorageFirewall`
+ - Added model `EnhancedSecurityComplianceDefinition`
+ - Added model `EnhancedSecurityMonitoringDefinition`
+ - Added enum `EnhancedSecurityMonitoringValue`
+ - Added enum `IdentityType`
+ - Added enum `InitialType`
+ - Added model `WorkspaceNoPublicIPBooleanParameter`
+ - Added model `WorkspacePropertiesAccessConnector`
+ - Method `AccessConnectorsOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, connector_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `AccessConnectorsOperations.begin_update` has a new overload `def begin_update(self: None, resource_group_name: str, connector_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `PrivateEndpointConnectionsOperations.begin_create` has a new overload `def begin_create(self: None, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, private_endpoint_connection: IO[bytes], content_type: str)`
+ - Method `VNetPeeringOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, workspace_name: str, peering_name: str, virtual_network_peering_parameters: IO[bytes], content_type: str)`
+ - Method `WorkspacesOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, workspace_name: str, parameters: IO[bytes], content_type: str)`
+ - Method `WorkspacesOperations.begin_update` has a new overload `def begin_update(self: None, resource_group_name: str, workspace_name: str, parameters: IO[bytes], content_type: str)`
+
+### Breaking Changes
+
+ - Method `WorkspacesOperations.begin_delete` inserted a `positional_or_keyword` parameter `force_deletion`
+
## 2.0.0 (2023-06-29)
### Features Added
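As a quick orientation for reviewers, here is a minimal sketch of the 3.0.0b1 surface described above; resource names are placeholders and the credential setup is the standard `azure-identity` pattern, so treat it as illustrative rather than a tested sample:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.databricks import AzureDatabricksManagementClient

client = AzureDatabricksManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # must be a UUID
)

# New Workspace properties surface on the returned model.
ws = client.workspaces.get(resource_group_name="my-rg", workspace_name="my-ws")
print(ws.is_uc_enabled, ws.default_storage_firewall)

# Breaking change: begin_delete now takes a positional-or-keyword force_deletion.
client.workspaces.begin_delete(
    resource_group_name="my-rg", workspace_name="my-ws", force_deletion=True
).result()
```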
diff --git a/sdk/databricks/azure-mgmt-databricks/README.md b/sdk/databricks/azure-mgmt-databricks/README.md
index 88cd8bbb286d..9eaea4defbae 100644
--- a/sdk/databricks/azure-mgmt-databricks/README.md
+++ b/sdk/databricks/azure-mgmt-databricks/README.md
@@ -1,7 +1,7 @@
# Microsoft Azure SDK for Python
This is the Microsoft Azure Databricks Management Client Library.
-This package has been tested with Python 3.7+.
+This package has been tested with Python 3.8+.
For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all).
## _Disclaimer_
@@ -12,7 +12,7 @@ _Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For
### Prerequisites
-- Python 3.7+ is required to use this package.
+- Python 3.8+ is required to use this package.
- [Azure subscription](https://azure.microsoft.com/free/)
### Install the package
@@ -24,7 +24,7 @@ pip install azure-identity
### Authentication
-By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configure of following environment variables.
+By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configuration of the following environment variables.
- `AZURE_CLIENT_ID` for Azure client ID.
- `AZURE_TENANT_ID` for Azure tenant ID.
@@ -59,6 +59,3 @@ Code samples for this package can be found at:
If you encounter any bugs or have suggestions, please file an issue in the
[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
section of the project.
-
-
-
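For reference, a short sketch of how these environment variables are typically consumed; the subscription-ID variable name below is a sample convention, not something the SDK itself reads:

```python
import os

from azure.identity import DefaultAzureCredential
from azure.mgmt.databricks import AzureDatabricksManagementClient

# With AZURE_CLIENT_ID, AZURE_TENANT_ID and the matching secret/certificate
# variables exported, DefaultAzureCredential resolves them automatically.
client = AzureDatabricksManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id=os.environ["AZURE_SUBSCRIPTION_ID"],
)
```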
diff --git a/sdk/databricks/azure-mgmt-databricks/_meta.json b/sdk/databricks/azure-mgmt-databricks/_meta.json
index f2a9d3dd0662..93cd7684bf93 100644
--- a/sdk/databricks/azure-mgmt-databricks/_meta.json
+++ b/sdk/databricks/azure-mgmt-databricks/_meta.json
@@ -1,11 +1,11 @@
{
- "commit": "f36175f4c54eeec5b6d409406e131dadb540546a",
+ "commit": "59c4421b00666b8f0d877d69829a5fecceab779b",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
- "autorest": "3.9.2",
+ "autorest": "3.10.2",
"use": [
- "@autorest/python@6.6.0",
- "@autorest/modelerfour@4.24.3"
+ "@autorest/python@6.27.4",
+ "@autorest/modelerfour@4.27.0"
],
- "autorest_command": "autorest specification/databricks/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.6.0 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False",
+ "autorest_command": "autorest specification/databricks/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.27.4 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False",
"readme": "specification/databricks/resource-manager/readme.md"
}
\ No newline at end of file
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py
index 5bcd6309b84e..783fbff2eb51 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._azure_databricks_management_client import AzureDatabricksManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._azure_databricks_management_client import AzureDatabricksManagementClient # type: ignore
from ._version import VERSION
__version__ = VERSION
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -21,6 +27,6 @@
__all__ = [
"AzureDatabricksManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
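For context, the `_patch.py` hooks imported above follow the standard azure-sdk customization template; a sketch of that contract (the exported name is hypothetical):

```python
# _patch.py -- customization hook consumed by the generated __init__.
from typing import List

__all__: List[str] = []  # e.g. ["MyWorkspaceHelper"], a hypothetical addition


def patch_sdk():
    """Runs at the end of the generated __init__; mutate the package here."""
```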
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py
index cc6649e1a282..204a0ba55f96 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py
@@ -8,9 +8,12 @@
from copy import deepcopy
from typing import Any, TYPE_CHECKING
+from typing_extensions import Self
+from azure.core.pipeline import policies
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
+from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy
from . import models as _models
from ._configuration import AzureDatabricksManagementClientConfiguration
@@ -26,11 +29,10 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class AzureDatabricksManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
+class AzureDatabricksManagementClient: # pylint: disable=too-many-instance-attributes
"""The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace /
Access Connector resources.
@@ -54,10 +56,13 @@ class AzureDatabricksManagementClient: # pylint: disable=client-accepts-api-ver
:vartype access_connectors: azure.mgmt.databricks.operations.AccessConnectorsOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
- :param subscription_id: The ID of the target subscription. Required.
+    :param subscription_id: The ID of the target subscription. The value must be a UUID. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
+ :keyword api_version: Api Version. Default value is "2025-03-01-preview". Note that overriding
+ this default value may result in unsupported behavior.
+ :paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
@@ -72,7 +77,25 @@ def __init__(
self._config = AzureDatabricksManagementClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
- self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ ARMAutoResourceProviderRegistrationPolicy(),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, policies=_policies, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
@@ -94,7 +117,7 @@ def __init__(
self._client, self._config, self._serialize, self._deserialize
)
- def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
+ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
@@ -114,12 +137,12 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
- return self._client.send_request(request_copy, **kwargs)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
def close(self) -> None:
self._client.close()
- def __enter__(self) -> "AzureDatabricksManagementClient":
+ def __enter__(self) -> Self:
self._client.__enter__()
return self
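A sketch of the adjusted `_send_request` signature, mirroring the usage its docstring already shows; `stream` is now an explicit keyword-only parameter instead of travelling in `**kwargs`:

```python
from azure.core.rest import HttpRequest
from azure.identity import DefaultAzureCredential
from azure.mgmt.databricks import AzureDatabricksManagementClient

client = AzureDatabricksManagementClient(
    DefaultAzureCredential(), "00000000-0000-0000-0000-000000000000"
)
request = HttpRequest("GET", "https://www.example.org/")
response = client._send_request(request, stream=True)  # private API, per the docstring
print(response.read())  # caller drains the streamed body
```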
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py
index 3b9c2e63367e..75f6c7d5caf8 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py
@@ -8,18 +8,16 @@
from typing import Any, TYPE_CHECKING
-from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class AzureDatabricksManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+class AzureDatabricksManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
"""Configuration for AzureDatabricksManagementClient.
Note that all parameters used to create this instance are saved as instance
@@ -27,12 +25,16 @@ class AzureDatabricksManagementClientConfiguration(Configuration): # pylint: di
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
- :param subscription_id: The ID of the target subscription. Required.
+    :param subscription_id: The ID of the target subscription. The value must be a UUID. Required.
:type subscription_id: str
+ :keyword api_version: Api Version. Default value is "2025-03-01-preview". Note that overriding
+ this default value may result in unsupported behavior.
+ :paramtype api_version: str
"""
def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
- super(AzureDatabricksManagementClientConfiguration, self).__init__(**kwargs)
+ api_version: str = kwargs.pop("api_version", "2025-03-01-preview")
+
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
@@ -40,8 +42,10 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs
self.credential = credential
self.subscription_id = subscription_id
+ self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-databricks/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
self._configure(**kwargs)
def _configure(self, **kwargs: Any) -> None:
@@ -50,9 +54,9 @@ def _configure(self, **kwargs: Any) -> None:
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = ARMChallengeAuthenticationPolicy(
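A sketch of the new keyword handling; the override value is hypothetical, and as the docstring warns, non-default versions may be unsupported:

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.databricks import AzureDatabricksManagementClient

client = AzureDatabricksManagementClient(
    DefaultAzureCredential(),
    "00000000-0000-0000-0000-000000000000",
    api_version="2024-05-01",  # hypothetical older service version
)
# The configuration also now records a default polling interval of 30 seconds.
print(client._config.api_version, client._config.polling_interval)
```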
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_serialization.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_serialization.py
index 842ae727fbbc..b24ab2885450 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_serialization.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -63,8 +62,8 @@
import isodate # type: ignore
-from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback
-from azure.core.serialization import NULL as AzureCoreNull
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -124,7 +125,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
pass
return ET.fromstring(data_as_str) # nosec
- except ET.ParseError:
+ except ET.ParseError as err:
# It might be because the server has an issue, and returned JSON with
# content-type XML....
# So let's try a JSON load, and if it's still broken
@@ -143,7 +144,9 @@ def _json_attemp(data):
# The function hack is because Py2.7 messes up with exception
# context otherwise.
_LOGGER.critical("Wasn't XML not JSON, failing")
- raise_with_traceback(DeserializationError, "XML is invalid")
+ raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
@classmethod
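A minimal sketch of the new `text/` branch (importing the vendored module directly, which is private API): plain-text payloads now round-trip as `str` instead of falling through to the final `DeserializationError`:

```python
from azure.mgmt.databricks._serialization import RawDeserializer

assert RawDeserializer.deserialize_from_text(b"plain body", "text/plain") == "plain body"
```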
@@ -153,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
        Headers will be tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -170,13 +178,6 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
return None
-try:
- basestring # type: ignore
- unicode_str = unicode # type: ignore
-except NameError:
- basestring = str
- unicode_str = str
-
_LOGGER = logging.getLogger(__name__)
try:
@@ -189,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+        """UTC offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -211,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -240,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(?<!\\)\.")
    def __init__(self, **kwargs: Any) -> None:
- self.additional_properties: Dict[str, Any] = {}
- for k in kwargs:
+ self.additional_properties: Optional[Dict[str, Any]] = {}
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -305,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -331,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -340,7 +379,7 @@ def _create_xml_node(cls):
return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
- """Return the JSON that would be sent to azure from this model.
+ """Return the JSON that would be sent to server from this model.
This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
@@ -351,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -385,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs)
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -400,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -413,9 +457,10 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@classmethod
def from_dict(
@@ -431,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -445,7 +492,7 @@ def from_dict(
if key_extractors is None
else key_extractors
)
- return deserializer(cls.__name__, data, content_type=content_type)
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@classmethod
def _flatten_subtype(cls, key, objects):
@@ -453,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
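The switch from `pop` to `get` means the discriminator key now survives classification. A sketch with hypothetical polymorphic models, again using the vendored private module:

```python
from azure.mgmt.databricks._serialization import Deserializer, Model


class Animal(Model):
    _attribute_map = {"kind": {"key": "kind", "type": "str"}}
    _subtype_map = {"kind": {"dog": "Dog"}}


class Dog(Animal):
    _attribute_map = {"kind": {"key": "kind", "type": "str"}}


payload = {"kind": "dog"}
pet = Deserializer({"Animal": Animal, "Dog": Dog})("Animal", payload)
assert isinstance(pet, Dog)
assert payload["kind"] == "dog"  # previously popped from the payload, now left in place
```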
@@ -506,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer: # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -545,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -561,17 +614,20 @@ def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None):
"[]": self.serialize_iter,
"{}": self.serialize_dict,
}
- self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {}
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -597,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -638,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -649,7 +708,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
else: # That's a basic type
# Integrate namespace if necessary
local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
- local_node.text = unicode_str(new_attr)
+ local_node.text = str(new_attr)
serialized.append(local_node) # type: ignore
else: # JSON
for k in reversed(keys): # type: ignore
@@ -662,23 +721,24 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
_serialized.update(_new_attr) # type: ignore
_new_attr = _new_attr[k] # type: ignore
_serialized = _serialized[k]
- except ValueError:
- continue
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
- raise_with_traceback(SerializationError, msg, err)
- else:
- return serialized
+ raise SerializationError(msg) from err
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -707,18 +767,20 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
- raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err)
+ raise SerializationError("Unable to build a model: " + str(err)) from err
return self._serialize(data, data_type, **kwargs)
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -729,30 +791,30 @@ def url(self, name, data, data_type, **kwargs):
if kwargs.get("skip_quote") is True:
output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :rtype: str
+ :rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
if data_type.startswith("["):
internal_data_type = data_type[1:-1]
- data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data]
- if not kwargs.get("skip_quote", False):
- data = [quote(str(d), safe="") for d in data]
- return str(self.serialize_iter(data, internal_data_type, **kwargs))
+ do_quote = not kwargs.get("skip_quote", False)
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
# Not a list, regular serialization
output = self.serialize_data(data, data_type, **kwargs)
@@ -762,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -783,32 +846,31 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
try:
- if data is AzureCoreNull:
+ if data is CoreNull:
return None
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -823,12 +885,11 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
- raise_with_traceback(SerializationError, msg.format(data, data_type), err)
- else:
- return self._serialize(data, **kwargs)
+ raise SerializationError(msg.format(data, data_type)) from err
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -844,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param obj data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -874,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -885,13 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
+        :param bool do_quote: Whether to quote the serialized result of each iterable element.
+            Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -903,9 +966,14 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
for d in data:
try:
serialized.append(self.serialize_data(d, iter_type, **kwargs))
- except ValueError:
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
serialized.append(None)
+ if kwargs.get("do_quote", False):
+ serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
+
if div:
serialized = ["" if s is None else str(s) for s in serialized]
serialized = div.join(serialized)
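A sketch of the new `do_quote` flag, which `query` above now delegates to instead of quoting elements inline:

```python
from azure.mgmt.databricks._serialization import Serializer

s = Serializer()
# Each element is serialized, then percent-encoded, then joined by div.
assert s.serialize_iter(["a b", "c/d"], "str", div=",", do_quote=True) == "a%20b,c%2Fd"
```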
@@ -941,16 +1009,17 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
for key, value in attr.items():
try:
serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
- except ValueError:
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
serialized[self.serialize_unicode(key)] = None
if "xml" in serialization_ctxt:
@@ -965,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -973,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -983,7 +1053,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
if obj_type is _long_type:
return self.serialize_long(attr)
- if obj_type is unicode_str:
+ if obj_type is str:
return self.serialize_unicode(attr)
if obj_type is datetime.datetime:
return self.serialize_iso(attr)
@@ -997,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1028,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1085,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1099,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1135,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1160,19 +1239,20 @@ def serialize_iso(attr, **kwargs):
return date + microseconds + "Z"
except (ValueError, OverflowError) as err:
msg = "Unable to serialize datetime object."
- raise_with_traceback(SerializationError, msg, err)
+ raise SerializationError(msg) from err
except AttributeError as err:
msg = "ISO-8601 object must be valid Datetime object."
- raise_with_traceback(TypeError, msg, err)
+ raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+        :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1180,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1199,14 +1279,15 @@ def rest_key_extractor(attr, attr_desc, data):
if working_data is None:
# If at any point while following flatten JSON path see None, it means
# that all properties under are None as well
- # https://github.com/Azure/msrest-for-python/issues/197
return None
key = ".".join(dict_keys[1:])
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1220,7 +1301,6 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
if working_data is None:
# If at any point while following flatten JSON path see None, it means
# that all properties under are None as well
- # https://github.com/Azure/msrest-for-python/issues/197
return None
key = ".".join(dict_keys[1:])
@@ -1228,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1275,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1327,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1350,7 +1441,7 @@ def xml_key_extractor(attr, attr_desc, data):
return children[0]
-class Deserializer(object):
+class Deserializer:
"""Response object model deserializer.
:param dict classes: Class type dictionary for deserializing complex types.
@@ -1359,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1381,7 +1472,7 @@ def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None):
"duration": (isodate.Duration, datetime.timedelta),
"iso-8601": (datetime.datetime),
}
- self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {}
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
self.key_extractors = [rest_key_extractor, xml_key_extractor]
# Additional properties only works if the "rest_key_extractor" is used to
# extract the keys. Making it to work whatever the key extractor is too much
@@ -1399,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1412,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1434,15 +1527,15 @@ def _deserialize(self, target_obj, data):
response, class_name = self._classify_target(target_obj, data)
- if isinstance(response, basestring):
+ if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
- if data is None:
+ if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1471,10 +1564,9 @@ def _deserialize(self, target_obj, data):
d_attrs[attr] = value
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
- raise_with_traceback(DeserializationError, msg, err)
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ raise DeserializationError(msg) from err
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1501,18 +1593,20 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
- if isinstance(target, basestring):
+ if isinstance(target, str):
try:
target = self.dependencies[target]
except KeyError:
return target, target
try:
- target = target._classify(data, self.dependencies)
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1527,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
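A sketch of the failsafe contract: per its docstring, a payload that cannot be deserialized yields `None` rather than an exception:

```python
from azure.mgmt.databricks._serialization import Deserializer

d = Deserializer()
assert d.failsafe_deserialize("int", "not-a-number", content_type="text/plain") is None
assert d.failsafe_deserialize("int", "42", content_type="text/plain") == 42
```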
@@ -1548,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1568,31 +1666,42 @@ def _unpack_content(raw_data, content_type=None):
if hasattr(raw_data, "_content_consumed"):
return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
- if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"):
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
return raw_data
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
setattr(response_obj, attr, attrs.get(attr))
if additional_properties:
- response_obj.additional_properties = additional_properties
+ response_obj.additional_properties = additional_properties # type: ignore
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1601,15 +1710,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1623,7 +1733,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1642,15 +1756,15 @@ def deserialize_data(self, data, data_type):
except (ValueError, TypeError, AttributeError) as err:
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return self._deserialize(obj_type, data)
+ raise DeserializationError(msg) from err
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1667,6 +1781,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1677,11 +1792,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1690,7 +1806,7 @@ def deserialize_object(self, attr, **kwargs):
if isinstance(attr, ET.Element):
# Do no recurse on XML, just return the tree as-is
return attr
- if isinstance(attr, basestring):
+ if isinstance(attr, str):
return self.deserialize_basic(attr, "str")
obj_type = type(attr)
if obj_type in self.basic_types:
@@ -1716,11 +1832,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1728,6 +1843,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1739,24 +1855,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, basestring):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1764,6 +1879,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1777,8 +1893,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1790,6 +1905,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1798,12 +1914,11 @@ def deserialize_enum(data, enum_obj):
data = data.value
if isinstance(data, int):
# Workaround. We might consider removing it in the future.
- # https://github.com/Azure/azure-rest-api-specs/issues/141
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1819,6 +1934,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray.
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1831,6 +1947,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string.
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1846,22 +1963,24 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal.
:raises: DeserializationError if string format invalid.
+ :rtype: decimal.Decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
try:
- return decimal.Decimal(attr) # type: ignore
+ return decimal.Decimal(str(attr)) # type: ignore
except decimal.DecimalException as err:
msg = "Invalid decimal {}".format(attr)
- raise_with_traceback(DeserializationError, msg, err)
+ raise DeserializationError(msg) from err
@staticmethod
def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int.
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1874,6 +1993,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration.
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1883,15 +2003,15 @@ def deserialize_duration(attr):
duration = isodate.parse_duration(attr)
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return duration
+ raise DeserializationError(msg) from err
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date.
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1900,13 +2020,14 @@ def deserialize_date(attr):
if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
# This must NOT use defaultmonth/defaultday. Using None ensures this raises an exception.
- return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
@staticmethod
def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time.
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1921,6 +2042,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime.
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1935,15 +2057,15 @@ def deserialize_rfc(attr):
date_obj = date_obj.astimezone(tz=TZ_UTC)
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime.
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1972,9 +2094,8 @@ def deserialize_iso(attr):
raise OverflowError("Hit max or min date")
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1982,15 +2103,16 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be deserialized.
+ :return: Deserialized datetime.
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
if isinstance(attr, ET.Element):
attr = int(attr.text) # type: ignore
try:
+ attr = int(attr)
date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
- raise_with_traceback(DeserializationError, msg, err)
- else:
- return date_obj
+ raise DeserializationError(msg) from err
+ return date_obj
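
The recurring change in this file swaps the Python 2 compatible `raise_with_traceback` helper for native `raise ... from ...` exception chaining. A minimal sketch of the new idiom, using a stand-in exception class rather than the SDK's own type:

import decimal

class DeserializationError(Exception):
    """Stand-in for the SDK's DeserializationError."""

def deserialize_decimal(attr):
    try:
        return decimal.Decimal(str(attr))
    except decimal.DecimalException as err:
        # "from err" stores the original failure on __cause__, so the
        # traceback shows both errors, which is what the removed helper emulated.
        raise DeserializationError("Invalid decimal {}".format(attr)) from err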
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_vendor.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_vendor.py
deleted file mode 100644
index bd0df84f5319..000000000000
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_vendor.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from typing import List, cast
-
-from azure.core.pipeline.transport import HttpRequest
-
-
-def _convert_request(request, files=None):
- data = request.content if not files else None
- request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
- if files:
- request.set_formdata_body(files)
- return request
-
-
-def _format_url_section(template, **kwargs):
- components = template.split("/")
- while components:
- try:
- return template.format(**kwargs)
- except KeyError as key:
- # Need the cast, as for some reasons "split" is typed as list[str | Any]
- formatted_components = cast(List[str], template.split("/"))
- components = [c for c in formatted_components if "{}".format(key.args[0]) not in c]
- template = "/".join(components)
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py
index 48944bf3938a..5819b888fe6e 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "2.0.0"
+VERSION = "3.0.0b1"
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py
index 6545dba75512..2629940e60db 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._azure_databricks_management_client import AzureDatabricksManagementClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._azure_databricks_management_client import AzureDatabricksManagementClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"AzureDatabricksManagementClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
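
The `TYPE_CHECKING` wildcard import lets type checkers see hand-written overrides before the generated names are bound, while the runtime `try/except ImportError` path keeps working when no customization exists. A minimal `_patch.py` satisfying the contract these imports assume (the body here is a sketch, not the shipped file):

from typing import List

__all__: List[str] = []  # names of customized classes to re-export from the package

def patch_sdk():
    """Called after the generated names are imported; a no-op by default."""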
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py
index f2fb0d7e332c..38b7be2e1a7c 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py
@@ -8,9 +8,12 @@
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
+from typing_extensions import Self
+from azure.core.pipeline import policies
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
+from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy
from .. import models as _models
from .._serialization import Deserializer, Serializer
@@ -26,11 +29,10 @@
)
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class AzureDatabricksManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
+class AzureDatabricksManagementClient: # pylint: disable=too-many-instance-attributes
"""The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace /
Access Connector resources.
@@ -54,10 +56,13 @@ class AzureDatabricksManagementClient: # pylint: disable=client-accepts-api-ver
:vartype access_connectors: azure.mgmt.databricks.aio.operations.AccessConnectorsOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :param subscription_id: The ID of the target subscription. Required.
+ :param subscription_id: The ID of the target subscription. The value must be a UUID. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
+ :keyword api_version: API version. Default value is "2025-03-01-preview". Note that overriding
+ this default value may result in unsupported behavior.
+ :paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
@@ -72,7 +77,25 @@ def __init__(
self._config = AzureDatabricksManagementClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
- self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ AsyncARMAutoResourceProviderRegistrationPolicy(),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, policies=_policies, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
@@ -94,7 +117,9 @@ def __init__(
self._client, self._config, self._serialize, self._deserialize
)
- def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
+ def _send_request(
+ self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
+ ) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
@@ -114,12 +139,12 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncH
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
- return self._client.send_request(request_copy, **kwargs)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
async def close(self) -> None:
await self._client.close()
- async def __aenter__(self) -> "AzureDatabricksManagementClient":
+ async def __aenter__(self) -> Self:
await self._client.__aenter__()
return self
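
Since `__aenter__` now returns `Self`, static checkers keep the concrete client type inside `async with`. A hedged usage sketch; the credential choice and resource names are placeholders, not part of this diff:

import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.databricks.aio import AzureDatabricksManagementClient

async def main():
    async with DefaultAzureCredential() as credential:
        async with AzureDatabricksManagementClient(
            credential=credential,
            subscription_id="00000000-0000-0000-0000-000000000000",  # must be a UUID
        ) as client:
            async for connector in client.access_connectors.list_by_resource_group("my-rg"):
                print(connector.name)

asyncio.run(main())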
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py
index ba1486eedc60..c8771768a227 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py
@@ -8,18 +8,16 @@
from typing import Any, TYPE_CHECKING
-from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class AzureDatabricksManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
+class AzureDatabricksManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
"""Configuration for AzureDatabricksManagementClient.
Note that all parameters used to create this instance are saved as instance
@@ -27,12 +25,16 @@ class AzureDatabricksManagementClientConfiguration(Configuration): # pylint: di
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :param subscription_id: The ID of the target subscription. Required.
+ :param subscription_id: The ID of the target subscription. The value must be a UUID. Required.
:type subscription_id: str
+ :keyword api_version: API version. Default value is "2025-03-01-preview". Note that overriding
+ this default value may result in unsupported behavior.
+ :paramtype api_version: str
"""
def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
- super(AzureDatabricksManagementClientConfiguration, self).__init__(**kwargs)
+ api_version: str = kwargs.pop("api_version", "2025-03-01-preview")
+
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
@@ -40,8 +42,10 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k
self.credential = credential
self.subscription_id = subscription_id
+ self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-databricks/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
self._configure(**kwargs)
def _configure(self, **kwargs: Any) -> None:
@@ -50,9 +54,9 @@ def _configure(self, **kwargs: Any) -> None:
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(
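
Because the configuration now pops `api_version` itself instead of inheriting from the removed `Configuration` base class, both it and `polling_interval` can be pinned at construction time. A sketch continuing the client from the previous example:

client = AzureDatabricksManagementClient(
    credential=credential,
    subscription_id="00000000-0000-0000-0000-000000000000",
    api_version="2025-03-01-preview",  # overriding the default may be unsupported
    polling_interval=10,  # seconds between LRO polls when no Retry-After header is present
)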
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py
index fb2a003837b4..d3f9857a4004 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py
@@ -5,17 +5,23 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._workspaces_operations import WorkspacesOperations
-from ._operations import Operations
-from ._private_link_resources_operations import PrivateLinkResourcesOperations
-from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
-from ._outbound_network_dependencies_endpoints_operations import OutboundNetworkDependenciesEndpointsOperations
-from ._vnet_peering_operations import VNetPeeringOperations
-from ._access_connectors_operations import AccessConnectorsOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._workspaces_operations import WorkspacesOperations # type: ignore
+from ._operations import Operations # type: ignore
+from ._private_link_resources_operations import PrivateLinkResourcesOperations # type: ignore
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations # type: ignore
+from ._outbound_network_dependencies_endpoints_operations import OutboundNetworkDependenciesEndpointsOperations # type: ignore
+from ._vnet_peering_operations import VNetPeeringOperations # type: ignore
+from ._access_connectors_operations import AccessConnectorsOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -27,5 +33,5 @@
"VNetPeeringOperations",
"AccessConnectorsOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_access_connectors_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_access_connectors_operations.py
index c27bbf1b9c6d..122a7097918e 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_access_connectors_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_access_connectors_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,9 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
@@ -16,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -29,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._access_connectors_operations import (
build_create_or_update_request,
build_delete_request,
@@ -39,6 +40,10 @@
build_update_request,
)
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -64,19 +69,18 @@ def __init__(self, *args, **kwargs) -> None:
@distributed_trace_async
async def get(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> _models.AccessConnector:
- """Gets an azure databricks accessConnector.
+ """Gets an Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: AccessConnector or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.AccessConnector
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -87,24 +91,22 @@ async def get(self, resource_group_name: str, connector_name: str, **kwargs: Any
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AccessConnector] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
connector_name=connector_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -114,21 +116,17 @@ async def get(self, resource_group_name: str, connector_name: str, **kwargs: Any
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = self._deserialize("AccessConnector", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return deserialized # type: ignore
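
A hedged usage sketch for the retyped `get` above, with the client constructed as in the async client example earlier in this diff ("my-rg" and "my-connector" are placeholders):

connector = await client.access_connectors.get(
    resource_group_name="my-rg",
    connector_name="my-connector",
)
print(connector.name)  # standard ARM resource field on the deserialized model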
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, connector_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -139,57 +137,52 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
connector_name=connector_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
- """Deletes the azure databricks accessConnector.
+ """Deletes the Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -197,13 +190,13 @@ async def begin_delete(self, resource_group_name: str, connector_name: str, **kw
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
connector_name=connector_name,
api_version=api_version,
@@ -212,11 +205,12 @@ async def begin_delete(self, resource_group_name: str, connector_name: str, **kw
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -225,26 +219,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
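
Driving the now explicitly typed `AsyncLROPoller[None]` looks like this (a sketch under the same client assumptions as the earlier examples):

poller = await client.access_connectors.begin_delete(
    resource_group_name="my-rg",
    connector_name="my-connector",
)
await poller.result()  # returns None once the delete reaches a terminal state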
async def _create_or_update_initial(
self,
resource_group_name: str,
connector_name: str,
- parameters: Union[_models.AccessConnector, IO],
+ parameters: Union[_models.AccessConnector, IO[bytes]],
**kwargs: Any
- ) -> _models.AccessConnector:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -255,9 +245,9 @@ async def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.AccessConnector] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -267,7 +257,7 @@ async def _create_or_update_initial(
else:
_json = self._serialize.body(parameters, "AccessConnector")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
connector_name=connector_name,
subscription_id=self._config.subscription_id,
@@ -275,40 +265,35 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("AccessConnector", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
-
@overload
async def begin_create_or_update(
self,
@@ -319,27 +304,19 @@ async def begin_create_or_update(
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.AccessConnector]:
- """Creates or updates azure databricks accessConnector.
+ """Creates or updates Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: Parameters supplied to the create or update an azure databricks
- accessConnector. Required.
+ :param parameters: Parameters supplied to create or update an Azure Databricks Access
+ Connector. Required.
:type parameters: ~azure.mgmt.databricks.models.AccessConnector
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -351,32 +328,24 @@ async def begin_create_or_update(
self,
resource_group_name: str,
connector_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.AccessConnector]:
- """Creates or updates azure databricks accessConnector.
+ """Creates or updates Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: Parameters supplied to the create or update an azure databricks
- accessConnector. Required.
- :type parameters: IO
+ :param parameters: Parameters supplied to create or update an Azure Databricks Access
+ Connector. Required.
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -388,30 +357,19 @@ async def begin_create_or_update(
self,
resource_group_name: str,
connector_name: str,
- parameters: Union[_models.AccessConnector, IO],
+ parameters: Union[_models.AccessConnector, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.AccessConnector]:
- """Creates or updates azure databricks accessConnector.
+ """Creates or updates Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: Parameters supplied to the create or update an azure databricks
- accessConnector. Is either a AccessConnector type or a IO type. Required.
- :type parameters: ~azure.mgmt.databricks.models.AccessConnector or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Parameters supplied to create or update an Azure Databricks Access
+ Connector. Is either an AccessConnector type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.databricks.models.AccessConnector or IO[bytes]
:return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -420,7 +378,7 @@ async def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.AccessConnector] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -438,12 +396,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = self._deserialize("AccessConnector", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -453,26 +412,24 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.AccessConnector].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return AsyncLROPoller[_models.AccessConnector](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
async def _update_initial(
self,
resource_group_name: str,
connector_name: str,
- parameters: Union[_models.AccessConnectorUpdate, IO],
+ parameters: Union[_models.AccessConnectorUpdate, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.AccessConnector]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -483,9 +440,9 @@ async def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.AccessConnector]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -495,7 +452,7 @@ async def _update_initial(
else:
_json = self._serialize.body(parameters, "AccessConnectorUpdate")
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
connector_name=connector_name,
subscription_id=self._config.subscription_id,
@@ -503,37 +460,34 @@ async def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return deserialized # type: ignore
@overload
async def begin_update(
@@ -545,26 +499,18 @@ async def begin_update(
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.AccessConnector]:
- """Updates an azure databricks accessConnector.
+ """Updates an Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: The update to the azure databricks accessConnector. Required.
+ :param parameters: The update to the Azure Databricks Access Connector. Required.
:type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -576,31 +522,23 @@ async def begin_update(
self,
resource_group_name: str,
connector_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.AccessConnector]:
- """Updates an azure databricks accessConnector.
+ """Updates an Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: The update to the azure databricks accessConnector. Required.
- :type parameters: IO
+ :param parameters: The update to the Azure Databricks Access Connector. Required.
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -612,30 +550,19 @@ async def begin_update(
self,
resource_group_name: str,
connector_name: str,
- parameters: Union[_models.AccessConnectorUpdate, IO],
+ parameters: Union[_models.AccessConnectorUpdate, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.AccessConnector]:
- """Updates an azure databricks accessConnector.
+ """Updates an Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: The update to the azure databricks accessConnector. Is either a
- AccessConnectorUpdate type or a IO type. Required.
- :type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: The update to the Azure Databricks Access Connector. Is either an
+ AccessConnectorUpdate type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate or IO[bytes]
:return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -644,7 +571,7 @@ async def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.AccessConnector] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -662,12 +589,13 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = self._deserialize("AccessConnector", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -677,28 +605,25 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.AccessConnector].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return AsyncLROPoller[_models.AccessConnector](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def list_by_resource_group(
self, resource_group_name: str, **kwargs: Any
) -> AsyncIterable["_models.AccessConnector"]:
- """Gets all the azure databricks accessConnectors within a resource group.
+ """Gets all the Azure Databricks Access Connectors within a resource group.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AccessConnector or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.AccessConnector]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -706,10 +631,10 @@ def list_by_resource_group(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AccessConnectorListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -720,23 +645,31 @@ def list_by_resource_group(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the call to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("AccessConnectorListResult", pipeline_response)
@@ -746,11 +679,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -763,15 +696,10 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors"
- }
-
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.AccessConnector"]:
- """Gets all the azure databricks accessConnectors within a subscription.
+ """Gets all the Azure Databricks Access Connectors within a subscription.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AccessConnector or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.AccessConnector]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -779,10 +707,10 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.AccessCo
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AccessConnectorListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -793,22 +721,30 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.AccessCo
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the call to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("AccessConnectorListResult", pipeline_response)
@@ -818,11 +754,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -834,7 +770,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list_by_subscription.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/accessConnectors"
- }
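
The pagination changes above drop the per-method `metadata` URL templates and the `_convert_request` shim in favor of rebuilding the continuation request explicitly. Distilled into a standalone sketch (assuming only `azure-core`; the function name is illustrative, not part of the generated code), the new next-link handling is:

```python
import urllib.parse

from azure.core.rest import HttpRequest
from azure.core.utils import case_insensitive_dict


def prepare_next_request(next_link: str, api_version: str) -> HttpRequest:
    # Re-parse the continuation URL returned by the service.
    parsed = urllib.parse.urlparse(next_link)
    params = case_insensitive_dict(
        {
            key: [urllib.parse.quote(v) for v in value]
            for key, value in urllib.parse.parse_qs(parsed.query).items()
        }
    )
    # Pin the client's configured api-version onto the next page, so paging
    # never drifts to whatever version the service echoed in next_link.
    params["api-version"] = api_version
    return HttpRequest("GET", urllib.parse.urljoin(next_link, parsed.path), params=params)
```

The same rewrite (hard-coded `"2023-05-01"`/`"2023-02-01"` strings replaced by `self._config.api_version`) applies to the first-page requests as well, so a client constructed with an explicit `api_version` now uses it uniformly.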
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py
index de022539b725..5150f930ef59 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,7 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
+import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
@@ -18,16 +19,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._operations import build_list_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -55,7 +58,6 @@ def __init__(self, *args, **kwargs) -> None:
def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
"""Lists all of the available RP operations.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Operation or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.Operation]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -63,10 +65,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -77,21 +79,29 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the call to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
@@ -101,11 +111,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -117,5 +127,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list.metadata = {"url": "/providers/Microsoft.Databricks/operations"}
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_outbound_network_dependencies_endpoints_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_outbound_network_dependencies_endpoints_operations.py
index 9ad1ef8fb372..6e8391db884f 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_outbound_network_dependencies_endpoints_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_outbound_network_dependencies_endpoints_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, List, Optional, TypeVar
from azure.core.exceptions import (
@@ -17,21 +17,23 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._outbound_network_dependencies_endpoints_operations import build_list_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class OutboundNetworkDependenciesEndpointsOperations:
+class OutboundNetworkDependenciesEndpointsOperations: # pylint: disable=name-too-long
"""
.. warning::
**DO NOT** instantiate this class directly.
@@ -66,12 +68,11 @@ async def list(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: list of OutboundEnvironmentEndpoint or the result of cls(response)
:rtype: list[~azure.mgmt.databricks.models.OutboundEnvironmentEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -82,24 +83,22 @@ async def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[List[_models.OutboundEnvironmentEndpoint]] = kwargs.pop("cls", None)
- request = build_list_request(
+ _request = build_list_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -109,13 +108,9 @@ async def list(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("[OutboundEnvironmentEndpoint]", pipeline_response)
+ deserialized = self._deserialize("[OutboundEnvironmentEndpoint]", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"
- }
+ return deserialized # type: ignore
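
Note that the `cls` hook is no longer advertised in the docstrings, but it still works; only the payload handed to the deserializer changed (`pipeline_response.http_response` instead of the whole `PipelineResponse`). A hedged sketch of the hook (names are illustrative; `client` as in the earlier sketch):

```python
def keep_status(pipeline_response, deserialized, response_headers):
    # cls receives the raw PipelineResponse, the deserialized model list,
    # and the response headers; its return value replaces the default result.
    return pipeline_response.http_response.status_code, deserialized


status, endpoints = await client.outbound_network_dependencies_endpoints.list(
    "my-resource-group", "my-workspace", cls=keep_status
)
```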
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py
index 1a1dc18d5f27..382c711c8084 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,9 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
@@ -16,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -29,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._private_endpoint_connections_operations import (
build_create_request,
build_delete_request,
@@ -37,6 +38,10 @@
build_list_request,
)
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -73,7 +78,6 @@ def list(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PrivateEndpointConnection or the result of
cls(response)
:rtype:
@@ -83,10 +87,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateEndpointConnectionsList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -97,24 +101,32 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the call to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PrivateEndpointConnectionsList", pipeline_response)
@@ -124,11 +136,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -141,10 +153,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections"
- }
-
@distributed_trace_async
async def get(
self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any
@@ -160,12 +168,11 @@ async def get(
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -176,25 +183,23 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -204,26 +209,22 @@ async def get(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
+ return deserialized # type: ignore
async def _create_initial(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
- private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO],
+ private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO[bytes]],
**kwargs: Any
- ) -> _models.PrivateEndpointConnection:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -234,9 +235,9 @@ async def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -246,7 +247,7 @@ async def _create_initial(
else:
_json = self._serialize.body(private_endpoint_connection, "PrivateEndpointConnection")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -255,40 +256,35 @@ async def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
-
- if response.status_code == 202:
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
-
@overload
async def begin_create(
self,
@@ -317,14 +313,6 @@ async def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
result of cls(response)
:rtype:
@@ -338,7 +326,7 @@ async def begin_create(
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
- private_endpoint_connection: IO,
+ private_endpoint_connection: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -356,18 +344,10 @@ async def begin_create(
:type private_endpoint_connection_name: str
:param private_endpoint_connection: The private endpoint connection with updated properties.
Required.
- :type private_endpoint_connection: IO
+ :type private_endpoint_connection: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
result of cls(response)
:rtype:
@@ -381,7 +361,7 @@ async def begin_create(
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
- private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO],
+ private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
"""Update private endpoint connection status.
@@ -396,20 +376,9 @@ async def begin_create(
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
:param private_endpoint_connection: The private endpoint connection with updated properties. Is
- either a PrivateEndpointConnection type or a IO type. Required.
+ either a PrivateEndpointConnection type or an IO[bytes] type. Required.
:type private_endpoint_connection: ~azure.mgmt.databricks.models.PrivateEndpointConnection or
- IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ IO[bytes]
:return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
result of cls(response)
:rtype:
@@ -419,7 +388,7 @@ async def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -438,12 +407,13 @@ async def begin_create(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -453,22 +423,20 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.PrivateEndpointConnection].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
+ return AsyncLROPoller[_models.PrivateEndpointConnection](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -479,40 +447,43 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -529,14 +500,6 @@ async def begin_delete(
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -544,13 +507,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -560,11 +523,12 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -573,14 +537,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
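
Behaviorally, the long-running operations above now stream their initial response (`_stream = True`, with `_create_initial`/`_delete_initial` returning `AsyncIterator[bytes]`), and the public `begin_*` methods read the body (`await raw_result.http_response.read()`) before handing off to a typed `AsyncLROPoller[...]`. From the caller's side nothing changes; a hedged sketch with placeholder resource names:

```python
poller = await client.private_endpoint_connections.begin_delete(
    resource_group_name="my-resource-group",
    workspace_name="my-workspace",
    private_endpoint_connection_name="my-connection",
)
# The poller is now parameterized (AsyncLROPoller[None] here), so type
# checkers know that result() yields None for deletes.
await poller.result()
```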
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py
index 292b13ae31ac..7b1e8366431b 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,7 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
+import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
@@ -18,17 +19,19 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._private_link_resources_operations import build_get_request, build_list_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -65,7 +68,6 @@ def list(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GroupIdInformation or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.GroupIdInformation]
@@ -74,10 +76,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateLinkResourcesList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -88,24 +90,32 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the call to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PrivateLinkResourcesList", pipeline_response)
@@ -115,11 +125,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -132,10 +142,6 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources"
- }
-
@distributed_trace_async
async def get(
self, resource_group_name: str, workspace_name: str, group_id: str, **kwargs: Any
@@ -151,12 +157,11 @@ async def get(
:type workspace_name: str
:param group_id: The name of the private link resource. Required.
:type group_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: GroupIdInformation or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.GroupIdInformation
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -167,25 +172,23 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.GroupIdInformation] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
group_id=group_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -195,13 +198,9 @@ async def get(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("GroupIdInformation", pipeline_response)
+ deserialized = self._deserialize("GroupIdInformation", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}"
- }
+ return deserialized # type: ignore
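
The private link resources file gets the same mechanical treatment: deserialization from `pipeline_response.http_response`, typed returns with `# type: ignore`, and no trailing `metadata` attributes. An illustrative call (the `group_id` value depends on the service; `databricks_ui_api` is shown only as an assumed example):

```python
info = await client.private_link_resources.get(
    "my-resource-group", "my-workspace", "databricks_ui_api"
)
# GroupIdInformation exposes the group id and its required members/zone names.
print(info.name, info.properties.group_id, info.properties.required_members)
```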
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py
index 02a4ff40a45c..fd5c576d558e 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,9 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
@@ -16,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -29,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._vnet_peering_operations import (
build_create_or_update_request,
build_delete_request,
@@ -37,6 +38,10 @@
build_list_by_workspace_request,
)
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -73,12 +78,11 @@ async def get(
:type workspace_name: str
:param peering_name: The name of the workspace vNet peering. Required.
:type peering_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkPeering or None or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.VirtualNetworkPeering or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -89,25 +93,23 @@ async def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[Optional[_models.VirtualNetworkPeering]] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
peering_name=peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -119,21 +121,17 @@ async def get(
deserialized = None
if response.status_code == 200:
- deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)
+ deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
+ return deserialized # type: ignore
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
+ async def _delete_initial(
self, resource_group_name: str, workspace_name: str, peering_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -144,40 +142,43 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
peering_name=peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
async def begin_delete(
@@ -192,14 +193,6 @@ async def begin_delete(
:type workspace_name: str
:param peering_name: The name of the workspace vNet peering. Required.
:type peering_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -207,13 +200,13 @@ async def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
peering_name=peering_name,
@@ -223,11 +216,12 @@ async def begin_delete(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -236,27 +230,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
workspace_name: str,
peering_name: str,
- virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO],
+ virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO[bytes]],
**kwargs: Any
- ) -> _models.VirtualNetworkPeering:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -267,9 +257,9 @@ async def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.VirtualNetworkPeering] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -279,7 +269,7 @@ async def _create_or_update_initial(
else:
_json = self._serialize.body(virtual_network_peering_parameters, "VirtualNetworkPeering")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
peering_name=peering_name,
@@ -288,40 +278,35 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
-
@overload
async def begin_create_or_update(
self,
@@ -348,14 +333,6 @@ async def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result
of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering]
@@ -368,7 +345,7 @@ async def begin_create_or_update(
resource_group_name: str,
workspace_name: str,
peering_name: str,
- virtual_network_peering_parameters: IO,
+ virtual_network_peering_parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -384,18 +361,10 @@ async def begin_create_or_update(
:type peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet
Peering. Required.
- :type virtual_network_peering_parameters: IO
+ :type virtual_network_peering_parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result
of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering]
@@ -408,7 +377,7 @@ async def begin_create_or_update(
resource_group_name: str,
workspace_name: str,
peering_name: str,
- virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO],
+ virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.VirtualNetworkPeering]:
"""Creates vNet Peering for workspace.
@@ -421,20 +390,9 @@ async def begin_create_or_update(
:param peering_name: The name of the workspace vNet peering. Required.
:type peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet
- Peering. Is either a VirtualNetworkPeering type or a IO type. Required.
+ Peering. Is either a VirtualNetworkPeering type or an IO[bytes] type. Required.
:type virtual_network_peering_parameters: ~azure.mgmt.databricks.models.VirtualNetworkPeering
- or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ or IO[bytes]
:return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result
of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering]
@@ -443,7 +401,7 @@ async def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.VirtualNetworkPeering] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -462,12 +420,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)
+ deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -477,17 +436,15 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.VirtualNetworkPeering].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
+ return AsyncLROPoller[_models.VirtualNetworkPeering](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def list_by_workspace(
@@ -500,7 +457,6 @@ def list_by_workspace(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkPeering or the result of
cls(response)
:rtype:
@@ -510,10 +466,10 @@ def list_by_workspace(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.VirtualNetworkPeeringList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -524,24 +480,32 @@ def list_by_workspace(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_workspace_request(
+ _request = build_list_by_workspace_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_workspace.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualNetworkPeeringList", pipeline_response)
@@ -551,11 +515,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -567,7 +531,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list_by_workspace.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings"
- }
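The regenerated pagers stop routing continuation calls through `_convert_request`; instead they re-parse the service-supplied next link and re-apply the client's `api-version` before following it. A minimal sketch of that rebuilding step in isolation, assuming an illustrative next link and api-version value (neither comes from this diff):

```python
import urllib.parse

from azure.core.rest import HttpRequest
from azure.core.utils import case_insensitive_dict


def build_next_request(next_link: str, api_version: str) -> HttpRequest:
    # Split the continuation URL so its query string can be rewritten.
    parsed = urllib.parse.urlparse(next_link)
    # Re-encode every query parameter the service already put on the link.
    params = case_insensitive_dict(
        {
            key: [urllib.parse.quote(v) for v in value]
            for key, value in urllib.parse.parse_qs(parsed.query).items()
        }
    )
    # Pin the follow-up call to the client's configured api-version.
    params["api-version"] = api_version
    return HttpRequest(
        "GET", urllib.parse.urljoin(next_link, parsed.path), params=params
    )


# Illustrative values only; a real next link arrives in the list response body.
request = build_next_request(
    "https://management.azure.com/subscriptions/000/resourceGroups/rg/providers"
    "/Microsoft.Databricks/workspaces/ws/virtualNetworkPeerings?$skiptoken=abc",
    api_version="2024-05-01",
)
```

This keeps paging stable even when the service echoes an older api-version back on the continuation URL.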
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py
index b2a46117a8d3..62551b0b5069 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,9 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
@@ -16,12 +17,13 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
@@ -29,7 +31,6 @@
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
-from ..._vendor import _convert_request
from ...operations._workspaces_operations import (
build_create_or_update_request,
build_delete_request,
@@ -39,6 +40,10 @@
build_update_request,
)
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -71,12 +76,11 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Workspace or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.Workspace
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -87,24 +91,22 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.Workspace] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -114,21 +116,17 @@ async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = self._deserialize("Workspace", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return deserialized # type: ignore
- async def _delete_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, workspace_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ async def _delete_initial(
+ self, resource_group_name: str, workspace_name: str, force_deletion: bool = False, **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -139,42 +137,48 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
+ force_deletion=force_deletion,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return deserialized # type: ignore
@distributed_trace_async
- async def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
+ async def begin_delete(
+ self, resource_group_name: str, workspace_name: str, force_deletion: bool = False, **kwargs: Any
+ ) -> AsyncLROPoller[None]:
"""Deletes the workspace.
:param resource_group_name: The name of the resource group. The name is case insensitive.
@@ -182,14 +186,9 @@ async def begin_delete(self, resource_group_name: str, workspace_name: str, **kw
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param force_deletion: Optional parameter to retain default Unity Catalog data. By default,
+ the data will be retained if UC is enabled on the workspace. Default value is False.
+ :type force_deletion: bool
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -197,26 +196,28 @@ async def begin_delete(self, resource_group_name: str, workspace_name: str, **kw
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = await self._delete_initial( # type: ignore
+ raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
+ force_deletion=force_deletion,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
@@ -225,22 +226,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
async def _create_or_update_initial(
- self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any
- ) -> _models.Workspace:
- error_map = {
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: Union[_models.Workspace, IO[bytes]],
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -251,9 +252,9 @@ async def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.Workspace] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -263,7 +264,7 @@ async def _create_or_update_initial(
else:
_json = self._serialize.body(parameters, "Workspace")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
@@ -271,40 +272,35 @@ async def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("Workspace", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
-
@overload
async def begin_create_or_update(
self,
@@ -327,14 +323,6 @@ async def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Workspace or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace]
@@ -346,7 +334,7 @@ async def begin_create_or_update(
self,
resource_group_name: str,
workspace_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -359,18 +347,10 @@ async def begin_create_or_update(
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
:param parameters: Parameters supplied to the create or update a workspace. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Workspace or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace]
@@ -379,7 +359,11 @@ async def begin_create_or_update(
@distributed_trace_async
async def begin_create_or_update(
- self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: Union[_models.Workspace, IO[bytes]],
+ **kwargs: Any
) -> AsyncLROPoller[_models.Workspace]:
"""Creates a new workspace.
@@ -389,19 +373,8 @@ async def begin_create_or_update(
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
:param parameters: Parameters supplied to the create or update a workspace. Is either a
- Workspace type or a IO type. Required.
- :type parameters: ~azure.mgmt.databricks.models.Workspace or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ Workspace type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.databricks.models.Workspace or IO[bytes]
:return: An instance of AsyncLROPoller that returns either Workspace or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace]
@@ -410,7 +383,7 @@ async def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Workspace] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -428,12 +401,13 @@ async def begin_create_or_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = self._deserialize("Workspace", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -443,26 +417,24 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.Workspace].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return AsyncLROPoller[_models.Workspace](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
async def _update_initial(
self,
resource_group_name: str,
workspace_name: str,
- parameters: Union[_models.WorkspaceUpdate, IO],
+ parameters: Union[_models.WorkspaceUpdate, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.Workspace]:
- error_map = {
+ ) -> AsyncIterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -473,9 +445,9 @@ async def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None)
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -485,7 +457,7 @@ async def _update_initial(
else:
_json = self._serialize.body(parameters, "WorkspaceUpdate")
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
@@ -493,37 +465,34 @@ async def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return deserialized # type: ignore
@overload
async def begin_update(
@@ -547,14 +516,6 @@ async def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Workspace or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace]
@@ -566,7 +527,7 @@ async def begin_update(
self,
resource_group_name: str,
workspace_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -579,18 +540,10 @@ async def begin_update(
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
:param parameters: The update to the workspace. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Workspace or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace]
@@ -602,7 +555,7 @@ async def begin_update(
self,
resource_group_name: str,
workspace_name: str,
- parameters: Union[_models.WorkspaceUpdate, IO],
+ parameters: Union[_models.WorkspaceUpdate, IO[bytes]],
**kwargs: Any
) -> AsyncLROPoller[_models.Workspace]:
"""Updates a workspace.
@@ -612,20 +565,9 @@ async def begin_update(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :param parameters: The update to the workspace. Is either a WorkspaceUpdate type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
- this operation to not poll, or pass in your own initialized polling object for a personal
- polling strategy.
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: The update to the workspace. Is either a WorkspaceUpdate type or an
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate or IO[bytes]
:return: An instance of AsyncLROPoller that returns either Workspace or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace]
@@ -634,7 +576,7 @@ async def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Workspace] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
@@ -652,12 +594,13 @@ async def begin_update(
params=_params,
**kwargs
)
+ await raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = self._deserialize("Workspace", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -667,17 +610,15 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return AsyncLROPoller.from_continuation_token(
+ return AsyncLROPoller[_models.Workspace].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return AsyncLROPoller[_models.Workspace](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Workspace"]:
@@ -686,7 +627,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Workspace or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -694,10 +634,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -708,23 +648,31 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("WorkspaceListResult", pipeline_response)
@@ -734,11 +682,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -751,15 +699,10 @@ async def get_next(next_link=None):
return AsyncItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces"
- }
-
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]:
"""Gets all the workspaces within a subscription.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Workspace or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -767,10 +710,10 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Workspac
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -781,22 +724,30 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Workspac
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
async def extract_data(pipeline_response):
deserialized = self._deserialize("WorkspaceListResult", pipeline_response)
@@ -806,11 +757,11 @@ async def extract_data(pipeline_response):
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -822,5 +773,3 @@ async def get_next(next_link=None):
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
-
- list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces"}
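On `WorkspacesOperations`, the regeneration threads a new `force_deletion` flag from `begin_delete` down through `_delete_initial` and `build_delete_request`, the positional-or-keyword breaking change called out in the changelog. A usage sketch with the async client; the credential setup, subscription id, and resource names below are placeholders, not values from this diff:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.databricks.aio import AzureDatabricksManagementClient


async def delete_workspace() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureDatabricksManagementClient(
            credential, "<subscription-id>"  # placeholder subscription
        ) as client:
            poller = await client.workspaces.begin_delete(
                resource_group_name="my-resource-group",
                workspace_name="my-workspace",
                # Per the new docstring, controls whether default Unity
                # Catalog data is retained when the workspace is deleted.
                force_deletion=True,
            )
            await poller.result()  # wait for the LRO to finish


asyncio.run(delete_workspace())
```

Because the parameter is positional-or-keyword rather than keyword-only, callers passing positional arguments after `workspace_name` should re-check their call sites.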
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py
index ac37825003f8..a801d19a453f 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py
@@ -5,72 +5,97 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._models_py3 import AccessConnector
-from ._models_py3 import AccessConnectorListResult
-from ._models_py3 import AccessConnectorProperties
-from ._models_py3 import AccessConnectorUpdate
-from ._models_py3 import AddressSpace
-from ._models_py3 import CreatedBy
-from ._models_py3 import Encryption
-from ._models_py3 import EncryptionEntitiesDefinition
-from ._models_py3 import EncryptionV2
-from ._models_py3 import EncryptionV2KeyVaultProperties
-from ._models_py3 import EndpointDependency
-from ._models_py3 import EndpointDetail
-from ._models_py3 import ErrorDetail
-from ._models_py3 import ErrorInfo
-from ._models_py3 import ErrorResponse
-from ._models_py3 import GroupIdInformation
-from ._models_py3 import GroupIdInformationProperties
-from ._models_py3 import ManagedDiskEncryption
-from ._models_py3 import ManagedDiskEncryptionKeyVaultProperties
-from ._models_py3 import ManagedIdentityConfiguration
-from ._models_py3 import ManagedServiceIdentity
-from ._models_py3 import Operation
-from ._models_py3 import OperationDisplay
-from ._models_py3 import OperationListResult
-from ._models_py3 import OutboundEnvironmentEndpoint
-from ._models_py3 import PrivateEndpoint
-from ._models_py3 import PrivateEndpointConnection
-from ._models_py3 import PrivateEndpointConnectionProperties
-from ._models_py3 import PrivateEndpointConnectionsList
-from ._models_py3 import PrivateLinkResourcesList
-from ._models_py3 import PrivateLinkServiceConnectionState
-from ._models_py3 import Resource
-from ._models_py3 import Sku
-from ._models_py3 import SystemData
-from ._models_py3 import TrackedResource
-from ._models_py3 import UserAssignedIdentity
-from ._models_py3 import VirtualNetworkPeering
-from ._models_py3 import VirtualNetworkPeeringList
-from ._models_py3 import VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork
-from ._models_py3 import VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork
-from ._models_py3 import Workspace
-from ._models_py3 import WorkspaceCustomBooleanParameter
-from ._models_py3 import WorkspaceCustomObjectParameter
-from ._models_py3 import WorkspaceCustomParameters
-from ._models_py3 import WorkspaceCustomStringParameter
-from ._models_py3 import WorkspaceEncryptionParameter
-from ._models_py3 import WorkspaceListResult
-from ._models_py3 import WorkspacePropertiesEncryption
-from ._models_py3 import WorkspaceProviderAuthorization
-from ._models_py3 import WorkspaceUpdate
+from typing import TYPE_CHECKING
-from ._azure_databricks_management_client_enums import CreatedByType
-from ._azure_databricks_management_client_enums import CustomParameterType
-from ._azure_databricks_management_client_enums import EncryptionKeySource
-from ._azure_databricks_management_client_enums import KeySource
-from ._azure_databricks_management_client_enums import ManagedServiceIdentityType
-from ._azure_databricks_management_client_enums import PeeringProvisioningState
-from ._azure_databricks_management_client_enums import PeeringState
-from ._azure_databricks_management_client_enums import PrivateEndpointConnectionProvisioningState
-from ._azure_databricks_management_client_enums import PrivateLinkServiceConnectionStatus
-from ._azure_databricks_management_client_enums import ProvisioningState
-from ._azure_databricks_management_client_enums import PublicNetworkAccess
-from ._azure_databricks_management_client_enums import RequiredNsgRules
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models_py3 import ( # type: ignore
+ AccessConnector,
+ AccessConnectorListResult,
+ AccessConnectorProperties,
+ AccessConnectorUpdate,
+ AddressSpace,
+ AutomaticClusterUpdateDefinition,
+ ComplianceSecurityProfileDefinition,
+ CreatedBy,
+ DefaultCatalogProperties,
+ Encryption,
+ EncryptionEntitiesDefinition,
+ EncryptionV2,
+ EncryptionV2KeyVaultProperties,
+ EndpointDependency,
+ EndpointDetail,
+ EnhancedSecurityComplianceDefinition,
+ EnhancedSecurityMonitoringDefinition,
+ ErrorDetail,
+ ErrorInfo,
+ ErrorResponse,
+ GroupIdInformation,
+ GroupIdInformationProperties,
+ ManagedDiskEncryption,
+ ManagedDiskEncryptionKeyVaultProperties,
+ ManagedIdentityConfiguration,
+ ManagedServiceIdentity,
+ Operation,
+ OperationDisplay,
+ OperationListResult,
+ OutboundEnvironmentEndpoint,
+ PrivateEndpoint,
+ PrivateEndpointConnection,
+ PrivateEndpointConnectionProperties,
+ PrivateEndpointConnectionsList,
+ PrivateLinkResourcesList,
+ PrivateLinkServiceConnectionState,
+ Resource,
+ Sku,
+ SystemData,
+ TrackedResource,
+ UserAssignedIdentity,
+ VirtualNetworkPeering,
+ VirtualNetworkPeeringList,
+ VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork,
+ VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork,
+ Workspace,
+ WorkspaceCustomBooleanParameter,
+ WorkspaceCustomObjectParameter,
+ WorkspaceCustomParameters,
+ WorkspaceCustomStringParameter,
+ WorkspaceEncryptionParameter,
+ WorkspaceListResult,
+ WorkspaceNoPublicIPBooleanParameter,
+ WorkspacePropertiesAccessConnector,
+ WorkspacePropertiesEncryption,
+ WorkspaceProviderAuthorization,
+ WorkspaceUpdate,
+)
+
+from ._azure_databricks_management_client_enums import ( # type: ignore
+ AutomaticClusterUpdateValue,
+ ComplianceSecurityProfileValue,
+ ComplianceStandard,
+ CreatedByType,
+ CustomParameterType,
+ DefaultStorageFirewall,
+ EncryptionKeySource,
+ EnhancedSecurityMonitoringValue,
+ IdentityType,
+ InitialType,
+ KeySource,
+ ManagedServiceIdentityType,
+ PeeringProvisioningState,
+ PeeringState,
+ PrivateEndpointConnectionProvisioningState,
+ PrivateLinkServiceConnectionStatus,
+ ProvisioningState,
+ PublicNetworkAccess,
+ RequiredNsgRules,
+)
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -79,13 +104,18 @@
"AccessConnectorProperties",
"AccessConnectorUpdate",
"AddressSpace",
+ "AutomaticClusterUpdateDefinition",
+ "ComplianceSecurityProfileDefinition",
"CreatedBy",
+ "DefaultCatalogProperties",
"Encryption",
"EncryptionEntitiesDefinition",
"EncryptionV2",
"EncryptionV2KeyVaultProperties",
"EndpointDependency",
"EndpointDetail",
+ "EnhancedSecurityComplianceDefinition",
+ "EnhancedSecurityMonitoringDefinition",
"ErrorDetail",
"ErrorInfo",
"ErrorResponse",
@@ -121,12 +151,21 @@
"WorkspaceCustomStringParameter",
"WorkspaceEncryptionParameter",
"WorkspaceListResult",
+ "WorkspaceNoPublicIPBooleanParameter",
+ "WorkspacePropertiesAccessConnector",
"WorkspacePropertiesEncryption",
"WorkspaceProviderAuthorization",
"WorkspaceUpdate",
+ "AutomaticClusterUpdateValue",
+ "ComplianceSecurityProfileValue",
+ "ComplianceStandard",
"CreatedByType",
"CustomParameterType",
+ "DefaultStorageFirewall",
"EncryptionKeySource",
+ "EnhancedSecurityMonitoringValue",
+ "IdentityType",
+ "InitialType",
"KeySource",
"ManagedServiceIdentityType",
"PeeringProvisioningState",
@@ -137,5 +176,5 @@
"PublicNetworkAccess",
"RequiredNsgRules",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
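A quick sanity check (not part of the generated diff): with the flattened re-export above, the newly added models and enums become importable directly from `azure.mgmt.databricks.models`. A minimal sketch, assuming azure-mgmt-databricks 3.0.0b1 is installed:

```python
from azure.mgmt.databricks.models import (
    AutomaticClusterUpdateDefinition,
    ComplianceStandard,
    DefaultCatalogProperties,
    DefaultStorageFirewall,
    EnhancedSecurityComplianceDefinition,
    WorkspaceNoPublicIPBooleanParameter,
    WorkspacePropertiesAccessConnector,
)

print(ComplianceStandard.HIPAA)        # ComplianceStandard.HIPAA
print(DefaultStorageFirewall.ENABLED)  # DefaultStorageFirewall.ENABLED
```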
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py
index d2c537b8f206..1a61fcb3554d 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py
@@ -10,6 +10,34 @@
from azure.core import CaseInsensitiveEnumMeta
+class AutomaticClusterUpdateValue(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """AutomaticClusterUpdateValue."""
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+
+class ComplianceSecurityProfileValue(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """ComplianceSecurityProfileValue."""
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+
+class ComplianceStandard(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Compliance standard that can be associated with a workspace."""
+
+ NONE = "NONE"
+ HIPAA = "HIPAA"
+ PCI_DSS = "PCI_DSS"
+ CYBER_ESSENTIAL_PLUS = "CYBER_ESSENTIAL_PLUS"
+ FEDRAMP_HIGH = "FEDRAMP_HIGH"
+ CANADA_PROTECTED_B = "CANADA_PROTECTED_B"
+ IRAP_PROTECTED = "IRAP_PROTECTED"
+ ISMAP = "ISMAP"
+ HITRUST = "HITRUST"
+
+
class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The type of identity that created the resource."""
@@ -27,12 +55,42 @@ class CustomParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
STRING = "String"
+class DefaultStorageFirewall(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Gets or Sets Default Storage Firewall configuration information."""
+
+ DISABLED = "Disabled"
+ ENABLED = "Enabled"
+
+
class EncryptionKeySource(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Keyvault."""
MICROSOFT_KEYVAULT = "Microsoft.Keyvault"
+class EnhancedSecurityMonitoringValue(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """EnhancedSecurityMonitoringValue."""
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+
+class IdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The identity type of the Access Connector Resource."""
+
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ USER_ASSIGNED = "UserAssigned"
+
+
+class InitialType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Defines the initial type of the default catalog. Possible values (case-insensitive):
+ HiveMetastore, UnityCatalog.
+ """
+
+ HIVE_METASTORE = "HiveMetastore"
+ UNITY_CATALOG = "UnityCatalog"
+
+
class KeySource(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The encryption keySource (provider). Possible values (case-insensitive): Default,
Microsoft.Keyvault.
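Because these enums use `CaseInsensitiveEnumMeta` and inherit from `str`, member lookup by name is case-insensitive and members interoperate with plain strings. A small sketch of the behavior the definitions above imply:

```python
from azure.mgmt.databricks.models import ComplianceStandard, DefaultStorageFirewall

# Name lookup is case-insensitive thanks to CaseInsensitiveEnumMeta.
assert ComplianceStandard["hipaa"] is ComplianceStandard.HIPAA

# str inheritance means members compare equal to their exact string values.
assert DefaultStorageFirewall.ENABLED == "Enabled"
```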
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py
index 9e28712ebc0b..6be554fc531b 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py
@@ -1,5 +1,5 @@
-# coding=utf-8
# pylint: disable=too-many-lines
+# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
@@ -16,10 +16,9 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
@@ -30,7 +29,7 @@ class Resource(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource Id for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -64,10 +63,10 @@ class TrackedResource(Resource):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Fully qualified resource Id for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -108,14 +107,14 @@ def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kw
class AccessConnector(TrackedResource):
- """Information about azure databricks accessConnector.
+ """Information about Azure Databricks Access Connector.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Fully qualified resource Id for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -130,7 +129,7 @@ class AccessConnector(TrackedResource):
:vartype identity: ~azure.mgmt.databricks.models.ManagedServiceIdentity
:ivar system_data: The system metadata relating to this resource.
:vartype system_data: ~azure.mgmt.databricks.models.SystemData
- :ivar properties: Azure Databricks accessConnector properties.
+ :ivar properties: Azure Databricks Access Connector properties.
:vartype properties: ~azure.mgmt.databricks.models.AccessConnectorProperties
"""
@@ -169,7 +168,7 @@ def __init__(
:paramtype location: str
:keyword identity: Managed service identity (system assigned and/or user assigned identities).
:paramtype identity: ~azure.mgmt.databricks.models.ManagedServiceIdentity
- :keyword properties: Azure Databricks accessConnector properties.
+ :keyword properties: Azure Databricks Access Connector properties.
:paramtype properties: ~azure.mgmt.databricks.models.AccessConnectorProperties
"""
super().__init__(tags=tags, location=location, **kwargs)
@@ -179,9 +178,9 @@ def __init__(
class AccessConnectorListResult(_serialization.Model):
- """List of azure databricks accessConnector.
+ """List of Azure Databricks Access Connector.
- :ivar value: The array of azure databricks accessConnector.
+ :ivar value: The array of Azure Databricks Access Connectors.
:vartype value: list[~azure.mgmt.databricks.models.AccessConnector]
:ivar next_link: The URL to use for getting the next set of results.
:vartype next_link: str
@@ -196,7 +195,7 @@ def __init__(
self, *, value: Optional[List["_models.AccessConnector"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
- :keyword value: The array of azure databricks accessConnector.
+ :keyword value: The array of Azure Databricks Access Connectors.
:paramtype value: list[~azure.mgmt.databricks.models.AccessConnector]
:keyword next_link: The URL to use for getting the next set of results.
:paramtype next_link: str
@@ -211,28 +210,33 @@ class AccessConnectorProperties(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- :ivar provisioning_state: Provisioning status of the accessConnector. Known values are:
+ :ivar provisioning_state: Provisioning status of the Access Connector. Known values are:
"Accepted", "Running", "Ready", "Creating", "Created", "Deleting", "Deleted", "Canceled",
"Failed", "Succeeded", and "Updating".
:vartype provisioning_state: str or ~azure.mgmt.databricks.models.ProvisioningState
+ :ivar refered_by: List of workspaces referring to this Access Connector.
+ :vartype refered_by: list[str]
"""
_validation = {
"provisioning_state": {"readonly": True},
+ "refered_by": {"readonly": True},
}
_attribute_map = {
"provisioning_state": {"key": "provisioningState", "type": "str"},
+ "refered_by": {"key": "referedBy", "type": "[str]"},
}
def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.provisioning_state = None
+ self.refered_by = None
class AccessConnectorUpdate(_serialization.Model):
- """An update to an azure databricks accessConnector.
+ """An update to an Azure Databricks Access Connector.
:ivar tags: Resource tags.
:vartype tags: dict[str, str]
@@ -286,6 +290,60 @@ def __init__(self, *, address_prefixes: Optional[List[str]] = None, **kwargs: An
self.address_prefixes = address_prefixes
+class AutomaticClusterUpdateDefinition(_serialization.Model):
+ """Status of automated cluster updates feature.
+
+ :ivar value: Known values are: "Enabled" and "Disabled".
+ :vartype value: str or ~azure.mgmt.databricks.models.AutomaticClusterUpdateValue
+ """
+
+ _attribute_map = {
+ "value": {"key": "value", "type": "str"},
+ }
+
+ def __init__(
+ self, *, value: Optional[Union[str, "_models.AutomaticClusterUpdateValue"]] = None, **kwargs: Any
+ ) -> None:
+ """
+ :keyword value: Known values are: "Enabled" and "Disabled".
+ :paramtype value: str or ~azure.mgmt.databricks.models.AutomaticClusterUpdateValue
+ """
+ super().__init__(**kwargs)
+ self.value = value
+
+
+class ComplianceSecurityProfileDefinition(_serialization.Model):
+ """Status of Compliance Security Profile feature.
+
+ :ivar compliance_standards: Compliance standards associated with the workspace.
+ :vartype compliance_standards: list[str or ~azure.mgmt.databricks.models.ComplianceStandard]
+ :ivar value: Known values are: "Enabled" and "Disabled".
+ :vartype value: str or ~azure.mgmt.databricks.models.ComplianceSecurityProfileValue
+ """
+
+ _attribute_map = {
+ "compliance_standards": {"key": "complianceStandards", "type": "[str]"},
+ "value": {"key": "value", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ compliance_standards: Optional[List[Union[str, "_models.ComplianceStandard"]]] = None,
+ value: Optional[Union[str, "_models.ComplianceSecurityProfileValue"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword compliance_standards: Compliance standards associated with the workspace.
+ :paramtype compliance_standards: list[str or ~azure.mgmt.databricks.models.ComplianceStandard]
+ :keyword value: Known values are: "Enabled" and "Disabled".
+ :paramtype value: str or ~azure.mgmt.databricks.models.ComplianceSecurityProfileValue
+ """
+ super().__init__(**kwargs)
+ self.compliance_standards = compliance_standards
+ self.value = value
+
+
class CreatedBy(_serialization.Model):
"""Provides details of the entity that created/updated the workspace.
@@ -320,6 +378,44 @@ def __init__(self, **kwargs: Any) -> None:
self.application_id = None
+class DefaultCatalogProperties(_serialization.Model):
+ """These properties lets user specify default catalog properties during workspace creation.
+
+ :ivar initial_type: Defines the initial type of the default catalog. Possible values
+ (case-insensitive): HiveMetastore, UnityCatalog. Known values are: "HiveMetastore" and
+ "UnityCatalog".
+ :vartype initial_type: str or ~azure.mgmt.databricks.models.InitialType
+ :ivar initial_name: Specifies the initial name of the default catalog. If not specified, the name
+ of the workspace will be used.
+ :vartype initial_name: str
+ """
+
+ _attribute_map = {
+ "initial_type": {"key": "initialType", "type": "str"},
+ "initial_name": {"key": "initialName", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ initial_type: Union[str, "_models.InitialType"] = "HiveMetastore",
+ initial_name: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword initial_type: Defines the initial type of the default catalog. Possible values
+ (case-insensitive): HiveMetastore, UnityCatalog. Known values are: "HiveMetastore" and
+ "UnityCatalog".
+ :paramtype initial_type: str or ~azure.mgmt.databricks.models.InitialType
+ :keyword initial_name: Specifies the initial name of the default catalog. If not specified, the
+ name of the workspace will be used.
+ :paramtype initial_name: str
+ """
+ super().__init__(**kwargs)
+ self.initial_type = initial_type
+ self.initial_name = initial_name
+
+
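A hedged construction sketch for the class above; note that `initial_type` defaults to `"HiveMetastore"` rather than `None`:

```python
from azure.mgmt.databricks.models import DefaultCatalogProperties, InitialType

# Omitting initial_name lets the service fall back to the workspace name.
catalog = DefaultCatalogProperties(initial_type=InitialType.UNITY_CATALOG)
assert catalog.initial_name is None
```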
class Encryption(_serialization.Model):
"""The object that contains details of encryption used on the workspace.
@@ -403,7 +499,7 @@ def __init__(
class EncryptionV2(_serialization.Model):
"""The object that contains details of encryption used on the workspace.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar key_source: The encryption keySource (provider). Possible values (case-insensitive):
Microsoft.Keyvault. Required. "Microsoft.Keyvault"
@@ -443,7 +539,7 @@ def __init__(
class EncryptionV2KeyVaultProperties(_serialization.Model):
"""Key Vault input properties for encryption.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar key_vault_uri: The Uri of KeyVault. Required.
:vartype key_vault_uri: str
@@ -562,10 +658,83 @@ def __init__(
self.is_accessible = is_accessible
+class EnhancedSecurityComplianceDefinition(_serialization.Model):
+ """Status of settings related to the Enhanced Security and Compliance Add-On.
+
+ :ivar automatic_cluster_update: Status of automated cluster updates feature.
+ :vartype automatic_cluster_update:
+ ~azure.mgmt.databricks.models.AutomaticClusterUpdateDefinition
+ :ivar compliance_security_profile: Status of Compliance Security Profile feature.
+ :vartype compliance_security_profile:
+ ~azure.mgmt.databricks.models.ComplianceSecurityProfileDefinition
+ :ivar enhanced_security_monitoring: Status of Enhanced Security Monitoring feature.
+ :vartype enhanced_security_monitoring:
+ ~azure.mgmt.databricks.models.EnhancedSecurityMonitoringDefinition
+ """
+
+ _attribute_map = {
+ "automatic_cluster_update": {"key": "automaticClusterUpdate", "type": "AutomaticClusterUpdateDefinition"},
+ "compliance_security_profile": {
+ "key": "complianceSecurityProfile",
+ "type": "ComplianceSecurityProfileDefinition",
+ },
+ "enhanced_security_monitoring": {
+ "key": "enhancedSecurityMonitoring",
+ "type": "EnhancedSecurityMonitoringDefinition",
+ },
+ }
+
+ def __init__(
+ self,
+ *,
+ automatic_cluster_update: Optional["_models.AutomaticClusterUpdateDefinition"] = None,
+ compliance_security_profile: Optional["_models.ComplianceSecurityProfileDefinition"] = None,
+ enhanced_security_monitoring: Optional["_models.EnhancedSecurityMonitoringDefinition"] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword automatic_cluster_update: Status of automated cluster updates feature.
+ :paramtype automatic_cluster_update:
+ ~azure.mgmt.databricks.models.AutomaticClusterUpdateDefinition
+ :keyword compliance_security_profile: Status of Compliance Security Profile feature.
+ :paramtype compliance_security_profile:
+ ~azure.mgmt.databricks.models.ComplianceSecurityProfileDefinition
+ :keyword enhanced_security_monitoring: Status of Enhanced Security Monitoring feature.
+ :paramtype enhanced_security_monitoring:
+ ~azure.mgmt.databricks.models.EnhancedSecurityMonitoringDefinition
+ """
+ super().__init__(**kwargs)
+ self.automatic_cluster_update = automatic_cluster_update
+ self.compliance_security_profile = compliance_security_profile
+ self.enhanced_security_monitoring = enhanced_security_monitoring
+
+
+class EnhancedSecurityMonitoringDefinition(_serialization.Model):
+ """Status of Enhanced Security Monitoring feature.
+
+ :ivar value: Known values are: "Enabled" and "Disabled".
+ :vartype value: str or ~azure.mgmt.databricks.models.EnhancedSecurityMonitoringValue
+ """
+
+ _attribute_map = {
+ "value": {"key": "value", "type": "str"},
+ }
+
+ def __init__(
+ self, *, value: Optional[Union[str, "_models.EnhancedSecurityMonitoringValue"]] = None, **kwargs: Any
+ ) -> None:
+ """
+ :keyword value: Known values are: "Enabled" and "Disabled".
+ :paramtype value: str or ~azure.mgmt.databricks.models.EnhancedSecurityMonitoringValue
+ """
+ super().__init__(**kwargs)
+ self.value = value
+
+
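Taken together, the three definitions above compose into the workspace-level setting. A minimal sketch, using only the constructors shown in this diff:

```python
from azure.mgmt.databricks.models import (
    AutomaticClusterUpdateDefinition,
    AutomaticClusterUpdateValue,
    ComplianceSecurityProfileDefinition,
    ComplianceSecurityProfileValue,
    ComplianceStandard,
    EnhancedSecurityComplianceDefinition,
    EnhancedSecurityMonitoringDefinition,
    EnhancedSecurityMonitoringValue,
)

esc = EnhancedSecurityComplianceDefinition(
    automatic_cluster_update=AutomaticClusterUpdateDefinition(
        value=AutomaticClusterUpdateValue.ENABLED
    ),
    compliance_security_profile=ComplianceSecurityProfileDefinition(
        compliance_standards=[ComplianceStandard.HIPAA],
        value=ComplianceSecurityProfileValue.ENABLED,
    ),
    enhanced_security_monitoring=EnhancedSecurityMonitoringDefinition(
        value=EnhancedSecurityMonitoringValue.ENABLED
    ),
)
```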
class ErrorDetail(_serialization.Model):
"""Error details.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar code: The error's code. Required.
:vartype code: str
@@ -604,7 +773,7 @@ def __init__(self, *, code: str, message: str, target: Optional[str] = None, **k
class ErrorInfo(_serialization.Model):
"""The code and message for an error.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar code: A machine readable error code. Required.
:vartype code: str
@@ -657,7 +826,7 @@ def __init__(
class ErrorResponse(_serialization.Model):
"""Contains details when the response code indicates an error.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar error: The error details. Required.
:vartype error: ~azure.mgmt.databricks.models.ErrorInfo
@@ -685,10 +854,10 @@ class GroupIdInformation(Resource):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Fully qualified resource Id for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -764,7 +933,7 @@ def __init__(
class ManagedDiskEncryption(_serialization.Model):
"""The object that contains details of encryption used on the workspace.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar key_source: The encryption keySource (provider). Possible values (case-insensitive):
Microsoft.Keyvault. Required. "Microsoft.Keyvault"
@@ -816,7 +985,7 @@ def __init__(
class ManagedDiskEncryptionKeyVaultProperties(_serialization.Model):
"""Key Vault input properties for encryption.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar key_vault_uri: The URI of KeyVault. Required.
:vartype key_vault_uri: str
@@ -892,7 +1061,7 @@ class ManagedServiceIdentity(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar principal_id: The service principal ID of the system assigned identity. This property
will only be provided for a system assigned identity.
@@ -906,7 +1075,7 @@ class ManagedServiceIdentity(_serialization.Model):
:vartype type: str or ~azure.mgmt.databricks.models.ManagedServiceIdentityType
:ivar user_assigned_identities: The set of user assigned identities associated with the
resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form:
- '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}.
+ '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long
The dictionary values can be empty objects ({}) in requests.
:vartype user_assigned_identities: dict[str,
~azure.mgmt.databricks.models.UserAssignedIdentity]
@@ -939,7 +1108,7 @@ def __init__(
:paramtype type: str or ~azure.mgmt.databricks.models.ManagedServiceIdentityType
:keyword user_assigned_identities: The set of user assigned identities associated with the
resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form:
- '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}.
+ '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. # pylint: disable=line-too-long
The dictionary values can be empty objects ({}) in requests.
:paramtype user_assigned_identities: dict[str,
~azure.mgmt.databricks.models.UserAssignedIdentity]
@@ -1118,7 +1287,7 @@ class PrivateEndpointConnection(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: The resource identifier.
:vartype id: str
@@ -1161,7 +1330,7 @@ class PrivateEndpointConnectionProperties(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar private_endpoint: Private endpoint.
:vartype private_endpoint: ~azure.mgmt.databricks.models.PrivateEndpoint
@@ -1282,7 +1451,7 @@ def __init__(
class PrivateLinkServiceConnectionState(_serialization.Model):
"""The current state of a private endpoint connection.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar status: The status of a private endpoint connection. Required. Known values are:
"Pending", "Approved", "Rejected", and "Disconnected".
@@ -1329,7 +1498,7 @@ def __init__(
class Sku(_serialization.Model):
"""SKU for the resource.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar name: The SKU name. Required.
:vartype name: str
@@ -1450,12 +1619,12 @@ def __init__(self, **kwargs: Any) -> None:
self.client_id = None
-class VirtualNetworkPeering(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class VirtualNetworkPeering(_serialization.Model):
"""Peerings in a VirtualNetwork resource.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar name: Name of the virtual network peering resource.
:vartype name: str
@@ -1563,7 +1732,7 @@ def __init__(
:paramtype use_remote_gateways: bool
:keyword databricks_virtual_network: The remote virtual network should be in the same region.
See here to learn more
- (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
+ (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). # pylint: disable=line-too-long
:paramtype databricks_virtual_network:
~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork
:keyword databricks_address_space: The reference to the databricks virtual network address
@@ -1571,7 +1740,7 @@ def __init__(
:paramtype databricks_address_space: ~azure.mgmt.databricks.models.AddressSpace
:keyword remote_virtual_network: The remote virtual network should be in the same region. See
here to learn more
- (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
+ (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). # pylint: disable=line-too-long
Required.
:paramtype remote_virtual_network:
~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork
@@ -1628,7 +1797,9 @@ def __init__(
self.next_link = next_link
-class VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork(_serialization.Model):
+class VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork(
+ _serialization.Model
+): # pylint: disable=name-too-long
"""The remote virtual network should be in the same region. See here to learn more
(https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
@@ -1649,7 +1820,7 @@ def __init__(self, *, id: Optional[str] = None, **kwargs: Any) -> None: # pylin
self.id = id
-class VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork(_serialization.Model):
+class VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork(_serialization.Model): # pylint: disable=name-too-long
"""The remote virtual network should be in the same region. See here to learn more
(https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
@@ -1670,15 +1841,15 @@ def __init__(self, *, id: Optional[str] = None, **kwargs: Any) -> None: # pylin
self.id = id
-class Workspace(TrackedResource): # pylint: disable=too-many-instance-attributes
+class Workspace(TrackedResource):
"""Information about workspace.
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar id: Fully qualified resource Id for the resource. Ex -
- /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
@@ -1728,6 +1899,10 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute
:vartype disk_encryption_set_id: str
:ivar encryption: Encryption properties for databricks workspace.
:vartype encryption: ~azure.mgmt.databricks.models.WorkspacePropertiesEncryption
+ :ivar enhanced_security_compliance: Contains settings related to the Enhanced Security and
+ Compliance Add-On.
+ :vartype enhanced_security_compliance:
+ ~azure.mgmt.databricks.models.EnhancedSecurityComplianceDefinition
:ivar private_endpoint_connections: Private endpoint connections created on the workspace.
:vartype private_endpoint_connections:
list[~azure.mgmt.databricks.models.PrivateEndpointConnection]
@@ -1739,6 +1914,16 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute
'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. Known values
are: "AllRules", "NoAzureDatabricksRules", and "NoAzureServiceRules".
:vartype required_nsg_rules: str or ~azure.mgmt.databricks.models.RequiredNsgRules
+ :ivar default_catalog: Properties for Default Catalog configuration during workspace creation.
+ :vartype default_catalog: ~azure.mgmt.databricks.models.DefaultCatalogProperties
+ :ivar is_uc_enabled: Indicates whether Unity Catalog is enabled for the workspace.
+ :vartype is_uc_enabled: bool
+ :ivar access_connector: Access Connector Resource that is going to be associated with
+ the Databricks Workspace.
+ :vartype access_connector: ~azure.mgmt.databricks.models.WorkspacePropertiesAccessConnector
+ :ivar default_storage_firewall: Gets or Sets Default Storage Firewall configuration
+ information. Known values are: "Disabled" and "Enabled".
+ :vartype default_storage_firewall: str or ~azure.mgmt.databricks.models.DefaultStorageFirewall
"""
_validation = {
@@ -1754,6 +1939,7 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute
"workspace_url": {"readonly": True},
"disk_encryption_set_id": {"readonly": True},
"private_endpoint_connections": {"readonly": True},
+ "is_uc_enabled": {"readonly": True},
}
_attribute_map = {
@@ -1781,12 +1967,20 @@ class Workspace(TrackedResource): # pylint: disable=too-many-instance-attribute
"managed_disk_identity": {"key": "properties.managedDiskIdentity", "type": "ManagedIdentityConfiguration"},
"disk_encryption_set_id": {"key": "properties.diskEncryptionSetId", "type": "str"},
"encryption": {"key": "properties.encryption", "type": "WorkspacePropertiesEncryption"},
+ "enhanced_security_compliance": {
+ "key": "properties.enhancedSecurityCompliance",
+ "type": "EnhancedSecurityComplianceDefinition",
+ },
"private_endpoint_connections": {
"key": "properties.privateEndpointConnections",
"type": "[PrivateEndpointConnection]",
},
"public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"},
"required_nsg_rules": {"key": "properties.requiredNsgRules", "type": "str"},
+ "default_catalog": {"key": "properties.defaultCatalog", "type": "DefaultCatalogProperties"},
+ "is_uc_enabled": {"key": "properties.isUcEnabled", "type": "bool"},
+ "access_connector": {"key": "properties.accessConnector", "type": "WorkspacePropertiesAccessConnector"},
+ "default_storage_firewall": {"key": "properties.defaultStorageFirewall", "type": "str"},
}
def __init__( # pylint: disable=too-many-locals
@@ -1804,8 +1998,12 @@ def __init__( # pylint: disable=too-many-locals
storage_account_identity: Optional["_models.ManagedIdentityConfiguration"] = None,
managed_disk_identity: Optional["_models.ManagedIdentityConfiguration"] = None,
encryption: Optional["_models.WorkspacePropertiesEncryption"] = None,
+ enhanced_security_compliance: Optional["_models.EnhancedSecurityComplianceDefinition"] = None,
public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None,
required_nsg_rules: Optional[Union[str, "_models.RequiredNsgRules"]] = None,
+ default_catalog: Optional["_models.DefaultCatalogProperties"] = None,
+ access_connector: Optional["_models.WorkspacePropertiesAccessConnector"] = None,
+ default_storage_firewall: Optional[Union[str, "_models.DefaultStorageFirewall"]] = None,
**kwargs: Any
) -> None:
"""
@@ -1836,6 +2034,10 @@ def __init__( # pylint: disable=too-many-locals
:paramtype managed_disk_identity: ~azure.mgmt.databricks.models.ManagedIdentityConfiguration
:keyword encryption: Encryption properties for databricks workspace.
:paramtype encryption: ~azure.mgmt.databricks.models.WorkspacePropertiesEncryption
+ :keyword enhanced_security_compliance: Contains settings related to the Enhanced Security and
+ Compliance Add-On.
+ :paramtype enhanced_security_compliance:
+ ~azure.mgmt.databricks.models.EnhancedSecurityComplianceDefinition
:keyword public_network_access: The network access type for accessing workspace. Set value to
disabled to access workspace only via private link. Known values are: "Enabled" and "Disabled".
:paramtype public_network_access: str or ~azure.mgmt.databricks.models.PublicNetworkAccess
@@ -1844,6 +2046,16 @@ def __init__( # pylint: disable=too-many-locals
'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. Known values
are: "AllRules", "NoAzureDatabricksRules", and "NoAzureServiceRules".
:paramtype required_nsg_rules: str or ~azure.mgmt.databricks.models.RequiredNsgRules
+ :keyword default_catalog: Properties for Default Catalog configuration during workspace
+ creation.
+ :paramtype default_catalog: ~azure.mgmt.databricks.models.DefaultCatalogProperties
+ :keyword access_connector: Access Connector Resource that is going to be associated with
+ the Databricks Workspace.
+ :paramtype access_connector: ~azure.mgmt.databricks.models.WorkspacePropertiesAccessConnector
+ :keyword default_storage_firewall: Gets or Sets Default Storage Firewall configuration
+ information. Known values are: "Disabled" and "Enabled".
+ :paramtype default_storage_firewall: str or
+ ~azure.mgmt.databricks.models.DefaultStorageFirewall
"""
super().__init__(tags=tags, location=location, **kwargs)
self.sku = sku
@@ -1862,9 +2074,14 @@ def __init__( # pylint: disable=too-many-locals
self.managed_disk_identity = managed_disk_identity
self.disk_encryption_set_id = None
self.encryption = encryption
+ self.enhanced_security_compliance = enhanced_security_compliance
self.private_endpoint_connections = None
self.public_network_access = public_network_access
self.required_nsg_rules = required_nsg_rules
+ self.default_catalog = default_catalog
+ self.is_uc_enabled = None
+ self.access_connector = access_connector
+ self.default_storage_firewall = default_storage_firewall
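A hedged end-to-end sketch of the new workspace properties. It assumes the pre-existing required `location` and `managed_resource_group_id` arguments, which this diff does not change; all resource IDs are placeholders:

```python
from azure.mgmt.databricks.models import (
    DefaultCatalogProperties,
    DefaultStorageFirewall,
    Workspace,
    WorkspacePropertiesAccessConnector,
)

ws = Workspace(
    location="westus",
    managed_resource_group_id="/subscriptions/<sub>/resourceGroups/managed-rg",
    default_catalog=DefaultCatalogProperties(initial_type="UnityCatalog"),
    default_storage_firewall=DefaultStorageFirewall.ENABLED,
    access_connector=WorkspacePropertiesAccessConnector(
        id=(
            "/subscriptions/<sub>/resourceGroups/rg/providers/"
            "Microsoft.Databricks/accessConnectors/my-connector"
        ),
        identity_type="SystemAssigned",
    ),
)

# is_uc_enabled is read-only and only populated on service responses.
assert ws.is_uc_enabled is None
```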
class WorkspaceCustomBooleanParameter(_serialization.Model):
@@ -1872,7 +2089,7 @@ class WorkspaceCustomBooleanParameter(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: The type of variable that this is. Known values are: "Bool", "Object", and
"String".
@@ -1906,7 +2123,7 @@ class WorkspaceCustomObjectParameter(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: The type of variable that this is. Known values are: "Bool", "Object", and
"String".
@@ -1935,7 +2152,7 @@ def __init__(self, *, value: JSON, **kwargs: Any) -> None:
self.value = value
-class WorkspaceCustomParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes
+class WorkspaceCustomParameters(_serialization.Model):
"""Custom Parameters used for Cluster Creation.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -1953,8 +2170,9 @@ class WorkspaceCustomParameters(_serialization.Model): # pylint: disable=too-ma
:ivar custom_private_subnet_name: The name of the Private Subnet within the Virtual Network.
:vartype custom_private_subnet_name:
~azure.mgmt.databricks.models.WorkspaceCustomStringParameter
- :ivar enable_no_public_ip: Should the Public IP be Disabled?.
- :vartype enable_no_public_ip: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter
+ :ivar enable_no_public_ip: Boolean indicating whether the public IP should be disabled. Default
+ value is true.
+ :vartype enable_no_public_ip: ~azure.mgmt.databricks.models.WorkspaceNoPublicIPBooleanParameter
:ivar load_balancer_backend_pool_name: Name of the outbound Load Balancer Backend Pool for
Secure Cluster Connectivity (No Public IP).
:vartype load_balancer_backend_pool_name:
@@ -2000,7 +2218,7 @@ class WorkspaceCustomParameters(_serialization.Model): # pylint: disable=too-ma
"custom_virtual_network_id": {"key": "customVirtualNetworkId", "type": "WorkspaceCustomStringParameter"},
"custom_public_subnet_name": {"key": "customPublicSubnetName", "type": "WorkspaceCustomStringParameter"},
"custom_private_subnet_name": {"key": "customPrivateSubnetName", "type": "WorkspaceCustomStringParameter"},
- "enable_no_public_ip": {"key": "enableNoPublicIp", "type": "WorkspaceCustomBooleanParameter"},
+ "enable_no_public_ip": {"key": "enableNoPublicIp", "type": "WorkspaceNoPublicIPBooleanParameter"},
"load_balancer_backend_pool_name": {
"key": "loadBalancerBackendPoolName",
"type": "WorkspaceCustomStringParameter",
@@ -2027,7 +2245,7 @@ def __init__(
custom_virtual_network_id: Optional["_models.WorkspaceCustomStringParameter"] = None,
custom_public_subnet_name: Optional["_models.WorkspaceCustomStringParameter"] = None,
custom_private_subnet_name: Optional["_models.WorkspaceCustomStringParameter"] = None,
- enable_no_public_ip: Optional["_models.WorkspaceCustomBooleanParameter"] = None,
+ enable_no_public_ip: Optional["_models.WorkspaceNoPublicIPBooleanParameter"] = None,
load_balancer_backend_pool_name: Optional["_models.WorkspaceCustomStringParameter"] = None,
load_balancer_id: Optional["_models.WorkspaceCustomStringParameter"] = None,
nat_gateway_name: Optional["_models.WorkspaceCustomStringParameter"] = None,
@@ -2054,8 +2272,10 @@ def __init__(
:keyword custom_private_subnet_name: The name of the Private Subnet within the Virtual Network.
:paramtype custom_private_subnet_name:
~azure.mgmt.databricks.models.WorkspaceCustomStringParameter
- :keyword enable_no_public_ip: Should the Public IP be Disabled?.
- :paramtype enable_no_public_ip: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter
+ :keyword enable_no_public_ip: Boolean indicating whether the public IP should be disabled.
+ Default value is true.
+ :paramtype enable_no_public_ip:
+ ~azure.mgmt.databricks.models.WorkspaceNoPublicIPBooleanParameter
:keyword load_balancer_backend_pool_name: Name of the outbound Load Balancer Backend Pool for
Secure Cluster Connectivity (No Public IP).
:paramtype load_balancer_backend_pool_name:
@@ -2113,7 +2333,7 @@ class WorkspaceCustomStringParameter(_serialization.Model):
Variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar type: The type of variable that this is. Known values are: "Bool", "Object", and
"String".
@@ -2201,10 +2421,96 @@ def __init__(
self.next_link = next_link
+class WorkspaceNoPublicIPBooleanParameter(_serialization.Model):
+ """The value which should be used for this field.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar type: The type of variable that this is. Known values are: "Bool", "Object", and
+ "String".
+ :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType
+ :ivar value: The value which should be used for this field. Required.
+ :vartype value: bool
+ """
+
+ _validation = {
+ "type": {"readonly": True},
+ "value": {"required": True},
+ }
+
+ _attribute_map = {
+ "type": {"key": "type", "type": "str"},
+ "value": {"key": "value", "type": "bool"},
+ }
+
+ def __init__(self, *, value: bool, **kwargs: Any) -> None:
+ """
+ :keyword value: The value which should be used for this field. Required.
+ :paramtype value: bool
+ """
+ super().__init__(**kwargs)
+ self.type = None
+ self.value = value
+
+
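Because `WorkspaceCustomParameters.enable_no_public_ip` was retyped earlier in this diff, callers now pass the dedicated wrapper defined above. A small sketch:

```python
from azure.mgmt.databricks.models import (
    WorkspaceCustomParameters,
    WorkspaceNoPublicIPBooleanParameter,
)

# enable_no_public_ip now takes the dedicated boolean wrapper type
# (previously WorkspaceCustomBooleanParameter).
params = WorkspaceCustomParameters(
    enable_no_public_ip=WorkspaceNoPublicIPBooleanParameter(value=True)
)
```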
+class WorkspacePropertiesAccessConnector(_serialization.Model):
+ """Access Connector Resource that is going to be associated with Databricks Workspace.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar id: The resource ID of Azure Databricks Access Connector Resource. Required.
+ :vartype id: str
+ :ivar identity_type: The identity type of the Access Connector Resource. Required. Known values
+ are: "SystemAssigned" and "UserAssigned".
+ :vartype identity_type: str or ~azure.mgmt.databricks.models.IdentityType
+ :ivar user_assigned_identity_id: The resource ID of the User Assigned Identity associated with
+ the Access Connector Resource. This is required for type 'UserAssigned' and not valid for type
+ 'SystemAssigned'.
+ :vartype user_assigned_identity_id: str
+ """
+
+ _validation = {
+ "id": {"required": True},
+ "identity_type": {"required": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "id", "type": "str"},
+ "identity_type": {"key": "identityType", "type": "str"},
+ "user_assigned_identity_id": {"key": "userAssignedIdentityId", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ identity_type: Union[str, "_models.IdentityType"],
+ user_assigned_identity_id: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword id: The resource ID of Azure Databricks Access Connector Resource. Required.
+ :paramtype id: str
+ :keyword identity_type: The identity type of the Access Connector Resource. Required. Known
+ values are: "SystemAssigned" and "UserAssigned".
+ :paramtype identity_type: str or ~azure.mgmt.databricks.models.IdentityType
+ :keyword user_assigned_identity_id: The resource ID of the User Assigned Identity associated
+ with the Access Connector Resource. This is required for type 'UserAssigned' and not valid for
+ type 'SystemAssigned'.
+ :paramtype user_assigned_identity_id: str
+ """
+ super().__init__(**kwargs)
+ self.id = id
+ self.identity_type = identity_type
+ self.user_assigned_identity_id = user_assigned_identity_id
+
+
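For the `"UserAssigned"` case the extra identity ID is mandatory, per the docstring above. A sketch with placeholder IDs:

```python
from azure.mgmt.databricks.models import IdentityType, WorkspacePropertiesAccessConnector

connector_ref = WorkspacePropertiesAccessConnector(
    id=(
        "/subscriptions/<sub>/resourceGroups/rg/providers/"
        "Microsoft.Databricks/accessConnectors/my-connector"
    ),
    identity_type=IdentityType.USER_ASSIGNED,
    # Required for "UserAssigned"; not valid for "SystemAssigned".
    user_assigned_identity_id=(
        "/subscriptions/<sub>/resourceGroups/rg/providers/"
        "Microsoft.ManagedIdentity/userAssignedIdentities/my-identity"
    ),
)
```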
class WorkspacePropertiesEncryption(_serialization.Model):
"""Encryption properties for databricks workspace.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar entities: Encryption entities definition for the workspace. Required.
:vartype entities: ~azure.mgmt.databricks.models.EncryptionEntitiesDefinition
@@ -2230,7 +2536,7 @@ def __init__(self, *, entities: "_models.EncryptionEntitiesDefinition", **kwargs
class WorkspaceProviderAuthorization(_serialization.Model):
"""The workspace provider authorization.
- All required parameters must be populated in order to send to Azure.
+ All required parameters must be populated in order to send to server.
:ivar principal_id: The provider's principal identifier. This is the identity that the provider
will use to call ARM to manage the workspace resources. Required.
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py
index fb2a003837b4..d3f9857a4004 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py
@@ -5,17 +5,23 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._workspaces_operations import WorkspacesOperations
-from ._operations import Operations
-from ._private_link_resources_operations import PrivateLinkResourcesOperations
-from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
-from ._outbound_network_dependencies_endpoints_operations import OutboundNetworkDependenciesEndpointsOperations
-from ._vnet_peering_operations import VNetPeeringOperations
-from ._access_connectors_operations import AccessConnectorsOperations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._workspaces_operations import WorkspacesOperations # type: ignore
+from ._operations import Operations # type: ignore
+from ._private_link_resources_operations import PrivateLinkResourcesOperations # type: ignore
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations # type: ignore
+from ._outbound_network_dependencies_endpoints_operations import OutboundNetworkDependenciesEndpointsOperations # type: ignore
+from ._vnet_peering_operations import VNetPeeringOperations # type: ignore
+from ._access_connectors_operations import AccessConnectorsOperations # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -27,5 +33,5 @@
"VNetPeeringOperations",
"AccessConnectorsOperations",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
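A hedged usage sketch tying the exported operations groups to the management client (the subscription ID is a placeholder, and `DefaultAzureCredential` comes from the separate `azure-identity` package):

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.databricks import AzureDatabricksManagementClient

client = AzureDatabricksManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# Each operations class in __all__ above surfaces as an attribute on the
# client, e.g. AccessConnectorsOperations as client.access_connectors.
for connector in client.access_connectors.list_by_subscription():
    print(connector.name)
```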
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_access_connectors_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_access_connectors_operations.py
index c654e272a4e8..0db600647244 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_access_connectors_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_access_connectors_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,9 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
+import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,13 +16,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -29,8 +31,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -44,7 +49,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -54,13 +59,13 @@ def build_get_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"connectorName": _SERIALIZER.url("connector_name", connector_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -77,7 +82,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -87,13 +92,13 @@ def build_delete_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"connectorName": _SERIALIZER.url("connector_name", connector_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -110,7 +115,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -121,13 +126,13 @@ def build_create_or_update_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"connectorName": _SERIALIZER.url("connector_name", connector_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -146,7 +151,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -157,13 +162,13 @@ def build_update_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"connectorName": _SERIALIZER.url("connector_name", connector_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -180,7 +185,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -190,12 +195,12 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -210,7 +215,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -219,7 +224,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -251,19 +256,18 @@ def __init__(self, *args, **kwargs):
@distributed_trace
def get(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> _models.AccessConnector:
- """Gets an azure databricks accessConnector.
+ """Gets an Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: AccessConnector or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.AccessConnector
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -274,24 +278,22 @@ def get(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> _
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AccessConnector] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
connector_name=connector_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -301,21 +303,15 @@ def get(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> _
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = self._deserialize("AccessConnector", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return deserialized # type: ignore
- def _delete_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, connector_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _delete_initial(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -326,57 +322,52 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
connector_name=connector_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> LROPoller[None]:
- """Deletes the azure databricks accessConnector.
+ """Deletes the Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
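The `_*_initial` helpers now stream: they send with `stream=True`, return `Iterator[bytes]`, drain the body on error, and defer the download via `stream_download`; the `begin_*` wrappers then call `raw_result.http_response.read()` before polling. A schematic of that contract (not the SDK's exact code):

```python
from typing import Iterator

from azure.core.exceptions import (
    HttpResponseError,
    StreamClosedError,
    StreamConsumedError,
)
from azure.core.rest import HttpResponse

def handle_initial_response(response: HttpResponse) -> Iterator[bytes]:
    # Error path: load and discard the body so the socket is released,
    # then raise with the original response attached.
    if response.status_code not in (200, 202, 204):
        try:
            response.read()
        except (StreamConsumedError, StreamClosedError):
            pass
        raise HttpResponseError(response=response)
    # Success path: hand back a lazy byte iterator; the begin_* wrapper later
    # forces the download with raw_result.http_response.read().
    return response.iter_bytes()
```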
@@ -384,13 +375,13 @@ def begin_delete(self, resource_group_name: str, connector_name: str, **kwargs:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
connector_name=connector_name,
api_version=api_version,
@@ -399,11 +390,12 @@ def begin_delete(self, resource_group_name: str, connector_name: str, **kwargs:
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
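Although the `polling`, `polling_interval`, and `continuation_token` keyword descriptions are removed from the docstrings, the keywords themselves still function, as the `cont_token` branch above shows. For instance, resuming a delete from a saved token (assuming the `client` from the earlier sketch):

```python
# Hedged sketch: start a delete, persist the continuation token, and resume
# the same operation later, possibly from a different process.
poller = client.access_connectors.begin_delete("my-rg", "my-connector")
token = poller.continuation_token()  # store this string somewhere durable

# ...later:
resumed = client.access_connectors.begin_delete(
    "my-rg", "my-connector", continuation_token=token
)
resumed.result()  # blocks until the delete finishes
```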
@@ -412,26 +404,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
connector_name: str,
- parameters: Union[_models.AccessConnector, IO],
+ parameters: Union[_models.AccessConnector, IO[bytes]],
**kwargs: Any
- ) -> _models.AccessConnector:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
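The pollers are now constructed and resumed through the parameterized class (`LROPoller[None]`, `LROPoller[_models.AccessConnector]`) rather than a bare `LROPoller` plus blanket casts. An illustrative typing-only sketch:

```python
from azure.core.polling import LROPoller

# Illustrative only: parameterizing the poller class at construction lets
# type checkers infer .result() without the old "# type: ignore" casts.
def wait_for_delete(poller: LROPoller[None]) -> None:
    result: None = poller.result()  # inferred as None; no cast required
    return result
```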
@@ -442,9 +430,9 @@ def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.AccessConnector] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -454,7 +442,7 @@ def _create_or_update_initial(
else:
_json = self._serialize.body(parameters, "AccessConnector")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
connector_name=connector_name,
subscription_id=self._config.subscription_id,
@@ -462,40 +450,35 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("AccessConnector", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
-
@overload
def begin_create_or_update(
self,
@@ -506,27 +489,19 @@ def begin_create_or_update(
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.AccessConnector]:
- """Creates or updates azure databricks accessConnector.
+ """Creates or updates Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: Parameters supplied to the create or update an azure databricks
- accessConnector. Required.
+ :param parameters: Parameters supplied to create or update an Azure Databricks Access
+ Connector. Required.
:type parameters: ~azure.mgmt.databricks.models.AccessConnector
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -538,32 +513,24 @@ def begin_create_or_update(
self,
resource_group_name: str,
connector_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.AccessConnector]:
- """Creates or updates azure databricks accessConnector.
+ """Creates or updates Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: Parameters supplied to the create or update an azure databricks
- accessConnector. Required.
- :type parameters: IO
+ :param parameters: Parameters supplied to create or update an Azure Databricks Access
+ Connector. Required.
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
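The binary overloads narrow `IO` to `IO[bytes]`. Both accepted body shapes, with placeholder values (the calls are commented out since they need a live client):

```python
import io
import json

from azure.mgmt.databricks.models import AccessConnector

# Both overload shapes side by side; values are placeholders.
model_body = AccessConnector(location="westus")
raw_body = io.BytesIO(json.dumps({"location": "westus"}).encode("utf-8"))

# With `client` as constructed in the earlier sketch, either form is accepted:
# client.access_connectors.begin_create_or_update("my-rg", "my-ac", model_body)
# client.access_connectors.begin_create_or_update(
#     "my-rg", "my-ac", raw_body, content_type="application/json"
# )
```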
@@ -575,30 +542,19 @@ def begin_create_or_update(
self,
resource_group_name: str,
connector_name: str,
- parameters: Union[_models.AccessConnector, IO],
+ parameters: Union[_models.AccessConnector, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.AccessConnector]:
- """Creates or updates azure databricks accessConnector.
+ """Creates or updates Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: Parameters supplied to the create or update an azure databricks
- accessConnector. Is either a AccessConnector type or a IO type. Required.
- :type parameters: ~azure.mgmt.databricks.models.AccessConnector or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: Parameters supplied to create or update an Azure Databricks Access
+ Connector. Is either an AccessConnector type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.databricks.models.AccessConnector or IO[bytes]
:return: An instance of LROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -607,7 +563,7 @@ def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.AccessConnector] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -625,12 +581,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = self._deserialize("AccessConnector", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -640,26 +597,24 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.AccessConnector].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return LROPoller[_models.AccessConnector](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
def _update_initial(
self,
resource_group_name: str,
connector_name: str,
- parameters: Union[_models.AccessConnectorUpdate, IO],
+ parameters: Union[_models.AccessConnectorUpdate, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.AccessConnector]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -670,9 +625,9 @@ def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.AccessConnector]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -682,7 +637,7 @@ def _update_initial(
else:
_json = self._serialize.body(parameters, "AccessConnectorUpdate")
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
connector_name=connector_name,
subscription_id=self._config.subscription_id,
@@ -690,37 +645,34 @@ def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return deserialized # type: ignore
@overload
def begin_update(
@@ -732,26 +684,18 @@ def begin_update(
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.AccessConnector]:
- """Updates an azure databricks accessConnector.
+ """Updates an Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: The update to the azure databricks accessConnector. Required.
+ :param parameters: The update to the Azure Databricks Access Connector. Required.
:type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -763,31 +707,23 @@ def begin_update(
self,
resource_group_name: str,
connector_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.AccessConnector]:
- """Updates an azure databricks accessConnector.
+ """Updates an Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: The update to the azure databricks accessConnector. Required.
- :type parameters: IO
+ :param parameters: The update to the Azure Databricks Access Connector. Required.
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
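`begin_update` keeps PATCH semantics: it takes the slim `AccessConnectorUpdate` model rather than a full `AccessConnector`. A hedged sketch with placeholder values:

```python
from azure.mgmt.databricks.models import AccessConnectorUpdate

# AccessConnectorUpdate carries only the patchable fields (tags, identity).
update = AccessConnectorUpdate(tags={"env": "dev"})
# With `client` as constructed in the earlier sketch:
# poller = client.access_connectors.begin_update("my-rg", "my-connector", update)
# connector = poller.result()  # a deserialized AccessConnector on completion
```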
@@ -799,30 +735,19 @@ def begin_update(
self,
resource_group_name: str,
connector_name: str,
- parameters: Union[_models.AccessConnectorUpdate, IO],
+ parameters: Union[_models.AccessConnectorUpdate, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.AccessConnector]:
- """Updates an azure databricks accessConnector.
+ """Updates an Azure Databricks Access Connector.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :param connector_name: The name of the azure databricks accessConnector. Required.
+ :param connector_name: The name of the Azure Databricks Access Connector. Required.
:type connector_name: str
- :param parameters: The update to the azure databricks accessConnector. Is either a
- AccessConnectorUpdate type or a IO type. Required.
- :type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: The update to the Azure Databricks Access Connector. Is either an
+ AccessConnectorUpdate type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate or IO[bytes]
:return: An instance of LROPoller that returns either AccessConnector or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
@@ -831,7 +756,7 @@ def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.AccessConnector] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -849,12 +774,13 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("AccessConnector", pipeline_response)
+ deserialized = self._deserialize("AccessConnector", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -864,26 +790,23 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.AccessConnector].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"
- }
+ return LROPoller[_models.AccessConnector](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.AccessConnector"]:
- """Gets all the azure databricks accessConnectors within a resource group.
+ """Gets all the Azure Databricks Access Connectors within a resource group.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AccessConnector or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.AccessConnector]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -891,10 +814,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AccessConnectorListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -905,23 +828,31 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the call to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("AccessConnectorListResult", pipeline_response)
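Continuation links are no longer followed verbatim: the new `prepare_request` parses the `next_link`, re-quotes each query value, and forces the client's configured `api-version` onto the follow-up request. A standalone walk-through with a made-up `next_link`:

```python
import urllib.parse

from azure.core.utils import case_insensitive_dict

# Made-up continuation URL; note the stale api-version and an unencoded value.
next_link = (
    "https://management.azure.com/subscriptions/xxx/providers/"
    "Microsoft.Databricks/accessConnectors?api-version=2023-05-01&$skipToken=a b"
)
parsed = urllib.parse.urlparse(next_link)
# parse_qs yields {name: [values]}; each value is re-quoted before reuse.
params = case_insensitive_dict(
    {
        key: [urllib.parse.quote(v) for v in values]
        for key, values in urllib.parse.parse_qs(parsed.query).items()
    }
)
params["api-version"] = "2025-03-01-preview"  # the client's version wins
print(urllib.parse.urljoin(next_link, parsed.path), dict(params))
```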
@@ -931,11 +862,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -948,15 +879,10 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors"
- }
-
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.AccessConnector"]:
- """Gets all the azure databricks accessConnectors within a subscription.
+ """Gets all the Azure Databricks Access Connectors within a subscription.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AccessConnector or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.AccessConnector]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -964,10 +890,10 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.AccessConnect
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.AccessConnectorListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -978,22 +904,30 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.AccessConnect
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the call to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("AccessConnectorListResult", pipeline_response)
@@ -1003,11 +937,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1019,7 +953,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list_by_subscription.metadata = {
- "url": "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/accessConnectors"
- }
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py
index f7ffaffe1228..814d2e52b271 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,7 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
+import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -18,16 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
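The `error_map` literals are now annotated as `MutableMapping`, imported through a version gate. A self-contained sketch of the pattern:

```python
import sys

# Replicates the version gate the generator now emits: Python 3.9+ imports the
# runtime class from collections.abc; older interpreters fall back to the
# deprecated typing alias.
if sys.version_info >= (3, 9):
    from collections.abc import MutableMapping
else:
    from typing import MutableMapping  # type: ignore

from azure.core.exceptions import (
    ClientAuthenticationError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
)

# Annotating the mapping explicitly (rather than letting a narrow dict type be
# inferred) keeps later error_map.update(kwargs.pop("error_map", ...)) merges
# type-checker friendly.
error_map: MutableMapping = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
    409: ResourceExistsError,
    304: ResourceNotModifiedError,
}
```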
@@ -39,7 +42,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -77,7 +80,6 @@ def __init__(self, *args, **kwargs):
def list(self, **kwargs: Any) -> Iterable["_models.Operation"]:
"""Lists all of the available RP operations.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Operation or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.Operation]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -85,10 +87,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -99,21 +101,29 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the call to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
@@ -123,11 +133,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -139,5 +149,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list.metadata = {"url": "/providers/Microsoft.Databricks/operations"}
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_outbound_network_dependencies_endpoints_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_outbound_network_dependencies_endpoints_operations.py
index d5a292eb45b2..24c0aba4d452 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_outbound_network_dependencies_endpoints_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_outbound_network_dependencies_endpoints_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,6 +5,7 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, List, Optional, TypeVar
from azure.core.exceptions import (
@@ -17,16 +17,18 @@
map_error,
)
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -40,7 +42,7 @@ def build_list_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -50,13 +52,13 @@ def build_list_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -67,7 +69,7 @@ def build_list_request(
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
-class OutboundNetworkDependenciesEndpointsOperations:
+class OutboundNetworkDependenciesEndpointsOperations: # pylint: disable=name-too-long
"""
.. warning::
**DO NOT** instantiate this class directly.
@@ -102,12 +104,11 @@ def list(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: list of OutboundEnvironmentEndpoint or the result of cls(response)
:rtype: list[~azure.mgmt.databricks.models.OutboundEnvironmentEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -118,24 +119,22 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[List[_models.OutboundEnvironmentEndpoint]] = kwargs.pop("cls", None)
- request = build_list_request(
+ _request = build_list_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -145,13 +144,9 @@ def list(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("[OutboundEnvironmentEndpoint]", pipeline_response)
+ deserialized = self._deserialize("[OutboundEnvironmentEndpoint]", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- return deserialized
-
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"
- }
+ return deserialized # type: ignore
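The public surface of `list` is unchanged even though deserialization now reads from `pipeline_response.http_response`. Assuming the `client` from the earlier sketch:

```python
# The operation still returns plain OutboundEnvironmentEndpoint models.
endpoints = client.outbound_network_dependencies_endpoints.list(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
)
for endpoint in endpoints:
    print(endpoint.category, [e.domain_name for e in endpoint.endpoints or []])
```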
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py
index ffe13a16a692..dd2eab17931b 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,9 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
+import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,13 +16,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -29,8 +31,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -44,7 +49,7 @@ def build_list_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -54,13 +59,13 @@ def build_list_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -81,7 +86,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -91,7 +96,7 @@ def build_get_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
@@ -100,7 +105,7 @@ def build_get_request(
),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -121,7 +126,7 @@ def build_create_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -132,7 +137,7 @@ def build_create_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
@@ -141,7 +146,7 @@ def build_create_request(
),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -164,7 +169,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -174,7 +179,7 @@ def build_delete_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
@@ -183,7 +188,7 @@ def build_delete_request(
),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -226,7 +231,6 @@ def list(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PrivateEndpointConnection or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.PrivateEndpointConnection]
@@ -235,10 +239,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateEndpointConnectionsList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -249,24 +253,32 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # call the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("PrivateEndpointConnectionsList", pipeline_response)
@@ -276,11 +288,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -293,10 +305,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections"
- }
-
@distributed_trace
def get(
self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any
@@ -312,12 +320,11 @@ def get(
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -328,25 +335,23 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -356,26 +361,22 @@ def get(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
+ return deserialized # type: ignore
def _create_initial(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
- private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO],
+ private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO[bytes]],
**kwargs: Any
- ) -> _models.PrivateEndpointConnection:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -386,9 +387,9 @@ def _create_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -398,7 +399,7 @@ def _create_initial(
else:
_json = self._serialize.body(private_endpoint_connection, "PrivateEndpointConnection")
- request = build_create_request(
+ _request = build_create_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -407,40 +408,35 @@ def _create_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
-
- if response.status_code == 202:
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
-
@overload
def begin_create(
self,
@@ -469,14 +465,6 @@ def begin_create(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection]
@@ -489,7 +477,7 @@ def begin_create(
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
- private_endpoint_connection: IO,
+ private_endpoint_connection: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -507,18 +495,10 @@ def begin_create(
:type private_endpoint_connection_name: str
:param private_endpoint_connection: The private endpoint connection with updated properties.
Required.
- :type private_endpoint_connection: IO
+ :type private_endpoint_connection: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection]
@@ -531,7 +511,7 @@ def begin_create(
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
- private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO],
+ private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.PrivateEndpointConnection]:
"""Update private endpoint connection status.
@@ -546,20 +526,9 @@ def begin_create(
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
:param private_endpoint_connection: The private endpoint connection with updated properties. Is
- either a PrivateEndpointConnection type or a IO type. Required.
+ either a PrivateEndpointConnection type or an IO[bytes] type. Required.
:type private_endpoint_connection: ~azure.mgmt.databricks.models.PrivateEndpointConnection or
- IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ IO[bytes]
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection]
@@ -568,7 +537,7 @@ def begin_create(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -587,12 +556,13 @@ def begin_create(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
+ deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -602,22 +572,20 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.PrivateEndpointConnection].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
+ return LROPoller[_models.PrivateEndpointConnection](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -628,40 +596,43 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -678,14 +649,6 @@ def begin_delete(
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -693,13 +656,13 @@ def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
@@ -709,11 +672,12 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -722,14 +686,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
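The hunks above rework the long-running-operation plumbing: the private `_create_initial`/`_delete_initial` methods now stream the raw body (`Iterator[bytes]`), the `begin_*` wrappers call `raw_result.http_response.read()` before polling starts, and `LROPoller` is parameterized by its result type. The public surface is unchanged; a minimal usage sketch, assuming placeholder resource names and an `azure-identity` credential:

```python
# A minimal sketch; subscription, group, workspace and connection names are
# placeholders, and azure-identity is assumed for the credential.
from azure.identity import DefaultAzureCredential
from azure.mgmt.databricks import AzureDatabricksManagementClient

client = AzureDatabricksManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# begin_delete still returns LROPoller[None]; internally the initial request
# is now streamed and fully read before ARM polling begins.
poller = client.private_endpoint_connections.begin_delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    private_endpoint_connection_name="<connection>",
)
poller.result()  # waits for the long-running delete to finish
```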
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py
index ef2b862e3862..8b0a27c23535 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -6,7 +5,9 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+import sys
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
+import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -18,16 +19,18 @@
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -41,7 +44,7 @@ def build_list_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -51,13 +54,13 @@ def build_list_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -74,7 +77,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -84,14 +87,14 @@ def build_get_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"groupId": _SERIALIZER.url("group_id", group_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -134,7 +137,6 @@ def list(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GroupIdInformation or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.GroupIdInformation]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -142,10 +144,10 @@ def list(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateLinkResourcesList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -156,24 +158,32 @@ def list(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_request(
+ _request = build_list_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # call the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("PrivateLinkResourcesList", pipeline_response)
@@ -183,11 +193,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -200,10 +210,6 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources"
- }
-
@distributed_trace
def get(
self, resource_group_name: str, workspace_name: str, group_id: str, **kwargs: Any
@@ -219,12 +225,11 @@ def get(
:type workspace_name: str
:param group_id: The name of the private link resource. Required.
:type group_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: GroupIdInformation or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.GroupIdInformation
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -235,25 +240,23 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.GroupIdInformation] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
group_id=group_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -263,13 +266,9 @@ def get(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("GroupIdInformation", pipeline_response)
+ deserialized = self._deserialize("GroupIdInformation", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}"
- }
+ return deserialized # type: ignore
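As in the other operation groups, the pager no longer issues a bare `HttpRequest("GET", next_link)`: it parses the service-returned link, re-encodes its query parameters, and pins the client's configured `api-version` onto every page so mixed-version paging cannot occur. A standalone sketch of that rewrite, assuming only `azure-core` (the helper name is illustrative, not part of the SDK's public surface):

```python
# Sketch of the next-link rewrite the regenerated pagers perform.
import urllib.parse

from azure.core.rest import HttpRequest
from azure.core.utils import case_insensitive_dict


def request_for_next_link(next_link: str, client_api_version: str) -> HttpRequest:
    parsed = urllib.parse.urlparse(next_link)
    # Re-encode the query parameters carried by the service-provided link...
    params = case_insensitive_dict(
        {
            key: [urllib.parse.quote(v) for v in value]
            for key, value in urllib.parse.parse_qs(parsed.query).items()
        }
    )
    # ...then force the client's configured api-version onto the next page.
    params["api-version"] = client_api_version
    return HttpRequest(
        "GET", urllib.parse.urljoin(next_link, parsed.path), params=params
    )
```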
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py
index 4d8af85087e4..9ab79024cc18 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,9 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
+import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,13 +16,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -29,8 +31,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -44,7 +49,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -54,14 +59,14 @@ def build_get_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"peeringName": _SERIALIZER.url("peering_name", peering_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -78,7 +83,7 @@ def build_delete_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -88,14 +93,14 @@ def build_delete_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"peeringName": _SERIALIZER.url("peering_name", peering_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -112,7 +117,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -123,14 +128,14 @@ def build_create_or_update_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"peeringName": _SERIALIZER.url("peering_name", peering_name, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -149,7 +154,7 @@ def build_list_by_workspace_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -159,13 +164,13 @@ def build_list_by_workspace_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -208,12 +213,11 @@ def get(
:type workspace_name: str
:param peering_name: The name of the workspace vNet peering. Required.
:type peering_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkPeering or None or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.VirtualNetworkPeering or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -224,25 +228,23 @@ def get(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[Optional[_models.VirtualNetworkPeering]] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
peering_name=peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -254,21 +256,17 @@ def get(
deserialized = None
if response.status_code == 200:
- deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)
+ deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
+ return deserialized # type: ignore
- def _delete_initial( # pylint: disable=inconsistent-return-statements
+ def _delete_initial(
self, resource_group_name: str, workspace_name: str, peering_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -279,40 +277,43 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
peering_name=peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
+ return deserialized # type: ignore
@distributed_trace
def begin_delete(
@@ -327,14 +328,6 @@ def begin_delete(
:type workspace_name: str
:param peering_name: The name of the workspace vNet peering. Required.
:type peering_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -342,13 +335,13 @@ def begin_delete(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
peering_name=peering_name,
@@ -358,11 +351,12 @@ def begin_delete(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -371,27 +365,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
workspace_name: str,
peering_name: str,
- virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO],
+ virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO[bytes]],
**kwargs: Any
- ) -> _models.VirtualNetworkPeering:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -402,9 +392,9 @@ def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.VirtualNetworkPeering] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -414,7 +404,7 @@ def _create_or_update_initial(
else:
_json = self._serialize.body(virtual_network_peering_parameters, "VirtualNetworkPeering")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
peering_name=peering_name,
@@ -423,40 +413,35 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
-
@overload
def begin_create_or_update(
self,
@@ -483,14 +468,6 @@ def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering]
@@ -503,7 +480,7 @@ def begin_create_or_update(
resource_group_name: str,
workspace_name: str,
peering_name: str,
- virtual_network_peering_parameters: IO,
+ virtual_network_peering_parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -519,18 +496,10 @@ def begin_create_or_update(
:type peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet
Peering. Required.
- :type virtual_network_peering_parameters: IO
+ :type virtual_network_peering_parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering]
@@ -543,7 +512,7 @@ def begin_create_or_update(
resource_group_name: str,
workspace_name: str,
peering_name: str,
- virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO],
+ virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.VirtualNetworkPeering]:
"""Creates vNet Peering for workspace.
@@ -556,20 +525,9 @@ def begin_create_or_update(
:param peering_name: The name of the workspace vNet peering. Required.
:type peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet
- Peering. Is either a VirtualNetworkPeering type or a IO type. Required.
+ Peering. Is either a VirtualNetworkPeering type or an IO[bytes] type. Required.
:type virtual_network_peering_parameters: ~azure.mgmt.databricks.models.VirtualNetworkPeering
- or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ or IO[bytes]
:return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering]
@@ -578,7 +536,7 @@ def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.VirtualNetworkPeering] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -597,12 +555,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)
+ deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -612,17 +571,15 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.VirtualNetworkPeering].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"
- }
+ return LROPoller[_models.VirtualNetworkPeering](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def list_by_workspace(
@@ -635,7 +592,6 @@ def list_by_workspace(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkPeering or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.VirtualNetworkPeering]
@@ -644,10 +600,10 @@ def list_by_workspace(
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.VirtualNetworkPeeringList] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -658,24 +614,32 @@ def list_by_workspace(
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_workspace_request(
+ _request = build_list_by_workspace_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_workspace.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # call the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualNetworkPeeringList", pipeline_response)
@@ -685,11 +649,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -701,7 +665,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list_by_workspace.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings"
- }
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py
index f45b6816d261..a094535ef46c 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -7,7 +6,9 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
-from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
+import sys
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, TypeVar, Union, cast, overload
+import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
@@ -15,13 +16,14 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.core.rest import HttpRequest
+from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
@@ -29,8 +31,11 @@
from .. import models as _models
from .._serialization import Serializer
-from .._vendor import _convert_request, _format_url_section
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -44,7 +49,7 @@ def build_get_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -54,13 +59,13 @@ def build_get_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -72,12 +77,12 @@ def build_get_request(
def build_delete_request(
- resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any
+ resource_group_name: str, workspace_name: str, subscription_id: str, *, force_deletion: bool = False, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -87,16 +92,18 @@ def build_delete_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+ if force_deletion is not None:
+ _params["forceDeletion"] = _SERIALIZER.query("force_deletion", force_deletion, "bool")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
@@ -110,7 +117,7 @@ def build_create_or_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -121,13 +128,13 @@ def build_create_or_update_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -146,7 +153,7 @@ def build_update_request(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
@@ -157,13 +164,13 @@ def build_update_request(
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -180,7 +187,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -190,12 +197,12 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
- "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+ "resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -210,7 +217,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-03-01-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
@@ -219,7 +226,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
- _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
+ _url: str = _url.format(**path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
@@ -258,12 +265,11 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: Workspace or the result of cls(response)
:rtype: ~azure.mgmt.databricks.models.Workspace
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -274,24 +280,22 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.Workspace] = kwargs.pop("cls", None)
- request = build_get_request(
+ _request = build_get_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -301,21 +305,17 @@ def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = self._deserialize("Workspace", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- get.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return deserialized # type: ignore
- def _delete_initial( # pylint: disable=inconsistent-return-statements
- self, resource_group_name: str, workspace_name: str, **kwargs: Any
- ) -> None:
- error_map = {
+ def _delete_initial(
+ self, resource_group_name: str, workspace_name: str, force_deletion: bool = False, **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -326,42 +326,48 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
- cls: ClsType[None] = kwargs.pop("cls", None)
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
- request = build_delete_request(
+ _request = build_delete_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
+ force_deletion=force_deletion,
api_version=api_version,
- template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _delete_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return deserialized # type: ignore
@distributed_trace
- def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]:
+ def begin_delete(
+ self, resource_group_name: str, workspace_name: str, force_deletion: bool = False, **kwargs: Any
+ ) -> LROPoller[None]:
"""Deletes the workspace.
:param resource_group_name: The name of the resource group. The name is case insensitive.
@@ -369,14 +375,9 @@ def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs:
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param force_deletion: Optional parameter to retain the default Unity Catalog data. By default
+ the data will be retained if Unity Catalog is enabled on the workspace. Default value is False.
+ :type force_deletion: bool
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -384,26 +385,28 @@ def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
- raw_result = self._delete_initial( # type: ignore
+ raw_result = self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
+ force_deletion=force_deletion,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {}) # type: ignore
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
@@ -412,22 +415,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[None].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_delete.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore
def _create_or_update_initial(
- self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any
- ) -> _models.Workspace:
- error_map = {
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: Union[_models.Workspace, IO[bytes]],
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -438,9 +441,9 @@ def _create_or_update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[_models.Workspace] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -450,7 +453,7 @@ def _create_or_update_initial(
else:
_json = self._serialize.body(parameters, "Workspace")
- request = build_create_or_update_request(
+ _request = build_create_or_update_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
@@ -458,40 +461,35 @@ def _create_or_update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code == 200:
- deserialized = self._deserialize("Workspace", pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
- _create_or_update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
-
@overload
def begin_create_or_update(
self,
@@ -514,14 +512,6 @@ def begin_create_or_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either Workspace or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -532,7 +522,7 @@ def begin_create_or_update(
self,
resource_group_name: str,
workspace_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -545,18 +535,10 @@ def begin_create_or_update(
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
:param parameters: Parameters supplied to the create or update a workspace. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either Workspace or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -564,7 +546,11 @@ def begin_create_or_update(
@distributed_trace
def begin_create_or_update(
- self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: Union[_models.Workspace, IO[bytes]],
+ **kwargs: Any
) -> LROPoller[_models.Workspace]:
"""Creates a new workspace.
@@ -574,19 +560,8 @@ def begin_create_or_update(
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
:param parameters: Parameters supplied to the create or update a workspace. Is either a
- Workspace type or a IO type. Required.
- :type parameters: ~azure.mgmt.databricks.models.Workspace or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ Workspace type or an IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.databricks.models.Workspace or IO[bytes]
:return: An instance of LROPoller that returns either Workspace or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -594,7 +569,7 @@ def begin_create_or_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Workspace] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -612,12 +587,13 @@ def begin_create_or_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = self._deserialize("Workspace", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -627,26 +603,24 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.Workspace].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_create_or_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return LROPoller[_models.Workspace](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
def _update_initial(
self,
resource_group_name: str,
workspace_name: str,
- parameters: Union[_models.WorkspaceUpdate, IO],
+ parameters: Union[_models.WorkspaceUpdate, IO[bytes]],
**kwargs: Any
- ) -> Optional[_models.Workspace]:
- error_map = {
+ ) -> Iterator[bytes]:
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -657,9 +631,9 @@ def _update_initial(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
- cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None)
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
@@ -669,7 +643,7 @@ def _update_initial(
else:
_json = self._serialize.body(parameters, "WorkspaceUpdate")
- request = build_update_request(
+ _request = build_update_request(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
subscription_id=self._config.subscription_id,
@@ -677,37 +651,34 @@ def _update_initial(
content_type=content_type,
json=_json,
content=_content,
- template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
- _stream = False
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
+ return cls(pipeline_response, deserialized, {}) # type: ignore
- _update_initial.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return deserialized # type: ignore
@overload
def begin_update(
@@ -731,14 +702,6 @@ def begin_update(
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either Workspace or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -749,7 +712,7 @@ def begin_update(
self,
resource_group_name: str,
workspace_name: str,
- parameters: IO,
+ parameters: IO[bytes],
*,
content_type: str = "application/json",
**kwargs: Any
@@ -762,18 +725,10 @@ def begin_update(
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
:param parameters: The update to the workspace. Required.
- :type parameters: IO
+ :type parameters: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
:return: An instance of LROPoller that returns either Workspace or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -784,7 +739,7 @@ def begin_update(
self,
resource_group_name: str,
workspace_name: str,
- parameters: Union[_models.WorkspaceUpdate, IO],
+ parameters: Union[_models.WorkspaceUpdate, IO[bytes]],
**kwargs: Any
) -> LROPoller[_models.Workspace]:
"""Updates a workspace.
@@ -794,20 +749,9 @@ def begin_update(
:type resource_group_name: str
:param workspace_name: The name of the workspace. Required.
:type workspace_name: str
- :param parameters: The update to the workspace. Is either a WorkspaceUpdate type or a IO type.
- Required.
- :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate or IO
- :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
- Default value is None.
- :paramtype content_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
- operation to not poll, or pass in your own initialized polling object for a personal polling
- strategy.
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
- Retry-After header is present.
+ :param parameters: The update to the workspace. Is either a WorkspaceUpdate type or an
+ IO[bytes] type. Required.
+ :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate or IO[bytes]
:return: An instance of LROPoller that returns either Workspace or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -815,7 +759,7 @@ def begin_update(
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Workspace] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
@@ -833,12 +777,13 @@ def begin_update(
params=_params,
**kwargs
)
+ raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize("Workspace", pipeline_response)
+ deserialized = self._deserialize("Workspace", pipeline_response.http_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized
if polling is True:
@@ -848,17 +793,15 @@ def get_long_running_output(pipeline_response):
else:
polling_method = polling
if cont_token:
- return LROPoller.from_continuation_token(
+ return LROPoller[_models.Workspace].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
-
- begin_update.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"
- }
+ return LROPoller[_models.Workspace](
+ self._client, raw_result, get_long_running_output, polling_method # type: ignore
+ )
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Workspace"]:
@@ -867,7 +810,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Workspace or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -875,10 +817,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -889,23 +831,31 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_resource_group_request(
+ _request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the request to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("WorkspaceListResult", pipeline_response)
@@ -915,11 +865,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -932,15 +882,10 @@ def get_next(next_link=None):
return ItemPaged(get_next, extract_data)
- list_by_resource_group.metadata = {
- "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces"
- }
-
@distributed_trace
def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Workspace"]:
"""Gets all the workspaces within a subscription.
- :keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Workspace or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.Workspace]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -948,10 +893,10 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Workspace"]:
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
- api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
+ api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None)
- error_map = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -962,22 +907,30 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Workspace"]:
def prepare_request(next_link=None):
if not next_link:
- request = build_list_by_subscription_request(
+ _request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
- template_url=self.list_by_subscription.metadata["url"],
headers=_headers,
params=_params,
)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
+ _request.url = self._client.format_url(_request.url)
else:
- request = HttpRequest("GET", next_link)
- request = _convert_request(request)
- request.url = self._client.format_url(request.url)
- request.method = "GET"
- return request
+ # make the request to the next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ _request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ _request.url = self._client.format_url(_request.url)
+ _request.method = "GET"
+ return _request
def extract_data(pipeline_response):
deserialized = self._deserialize("WorkspaceListResult", pipeline_response)
@@ -987,11 +940,11 @@ def extract_data(pipeline_response):
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
- request = prepare_request(next_link)
+ _request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
- request, stream=_stream, **kwargs
+ _request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
@@ -1003,5 +956,3 @@ def get_next(next_link=None):
return pipeline_response
return ItemPaged(get_next, extract_data)
-
- list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces"}
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_create_or_update.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_create_or_update.py
index ad41beb497b1..7bba529398be 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_create_or_update.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_create_or_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.access_connectors.begin_create_or_update(
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-05-01/examples/AccessConnectorCreateOrUpdate.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/AccessConnectorCreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_create_or_update_with_user_assigned.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_create_or_update_with_user_assigned.py
index aeaeac41a4dc..7106e38d6c77 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_create_or_update_with_user_assigned.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_create_or_update_with_user_assigned.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.access_connectors.begin_create_or_update(
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-05-01/examples/AccessConnectorCreateOrUpdateWithUserAssigned.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/AccessConnectorCreateOrUpdateWithUserAssigned.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_delete.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_delete.py
index 382be65bf940..cb589ac3280e 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_delete.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
client.access_connectors.begin_delete(
@@ -35,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-05-01/examples/AccessConnectorDelete.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/AccessConnectorDelete.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_get.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_get.py
index 9915650cb0fc..74e27520a44a 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_get.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.access_connectors.get(
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-05-01/examples/AccessConnectorGet.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/AccessConnectorGet.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_patch_update.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_patch_update.py
index 90446cfa1dbf..7eb8e2c7bb50 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_patch_update.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connector_patch_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.access_connectors.begin_update(
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-05-01/examples/AccessConnectorPatchUpdate.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/AccessConnectorPatchUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connectors_list_by_resource_group.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connectors_list_by_resource_group.py
index 03fcc99d8436..b894f890d66a 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connectors_list_by_resource_group.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connectors_list_by_resource_group.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.access_connectors.list_by_resource_group(
@@ -36,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-05-01/examples/AccessConnectorsListByResourceGroup.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/AccessConnectorsListByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connectors_list_by_subscription_id.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connectors_list_by_subscription_id.py
index e2c0b2130693..8106248179a6 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connectors_list_by_subscription_id.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/access_connectors_list_by_subscription_id.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.access_connectors.list_by_subscription()
@@ -34,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-05-01/examples/AccessConnectorsListBySubscriptionId.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/AccessConnectorsListBySubscriptionId.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/disable_encryption.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/disable_encryption.py
index 5b2885f2c212..2cd791eab691 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/disable_encryption.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/disable_encryption.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.begin_create_or_update(
@@ -35,7 +36,7 @@ def main():
parameters={
"location": "westus",
"properties": {
- "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG",
+ "managedResourceGroupId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/myManagedRG",
"parameters": {"encryption": {"value": {"keySource": "Default"}}},
},
},
@@ -43,6 +44,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/DisableEncryption.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/DisableEncryption.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/enable_encryption.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/enable_encryption.py
index 5dcefd4874dd..e0270c7b7ab1 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/enable_encryption.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/enable_encryption.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.begin_create_or_update(
@@ -35,7 +36,7 @@ def main():
parameters={
"location": "westus",
"properties": {
- "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG",
+ "managedResourceGroupId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/myManagedRG",
"parameters": {
"encryption": {
"value": {
@@ -53,6 +54,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/EnableEncryption.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/EnableEncryption.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_endpoint_connections.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_endpoint_connections.py
index 72771d1f6e64..ba539bba1008 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_endpoint_connections.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_endpoint_connections.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/ListPrivateEndpointConnections.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/ListPrivateEndpointConnections.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_link_resources.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_link_resources.py
index f6c949cdd93a..14d26a392ebf 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_link_resources.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_link_resources.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/ListPrivateLinkResources.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/ListPrivateLinkResources.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/operations_list.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/operations_list.py
index 8f4805d40a05..3e700fd97ecf 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/operations_list.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/operations_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -34,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/OperationsList.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/OperationsList.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/outbound_network_dependencies_endpoints_list.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/outbound_network_dependencies_endpoints_list.py
index 9e31d4e14af5..90c029477ed6 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/outbound_network_dependencies_endpoints_list.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/outbound_network_dependencies_endpoints_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/OutboundNetworkDependenciesEndpointsList.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/OutboundNetworkDependenciesEndpointsList.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/prepare_encryption.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/prepare_encryption.py
index 4c3469c30827..defd18a82cce 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/prepare_encryption.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/prepare_encryption.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.begin_create_or_update(
@@ -35,7 +36,7 @@ def main():
parameters={
"location": "westus",
"properties": {
- "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG",
+ "managedResourceGroupId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/myManagedRG",
"parameters": {"prepareEncryption": {"value": True}},
},
},
@@ -43,6 +44,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/PrepareEncryption.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/PrepareEncryption.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_delete.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_delete.py
index 04977a28759f..fc4dbc190de7 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_delete.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -36,6 +37,6 @@ def main():
).result()
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/PrivateEndpointConnectionsDelete.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/PrivateEndpointConnectionsDelete.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_get.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_get.py
index a575472ea6a9..316575049049 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_get.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/PrivateEndpointConnectionsGet.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/PrivateEndpointConnectionsGet.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_update.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_update.py
index b6bd920b70dc..7ba09790e81e 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_update.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/private_endpoint_connections_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -45,6 +46,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/PrivateEndpointConnectionsUpdate.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/PrivateEndpointConnectionsUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/private_link_resources_get.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/private_link_resources_get.py
index 05e95ba4db48..f700decb36f1 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/private_link_resources_get.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/private_link_resources_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/PrivateLinkResourcesGet.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/PrivateLinkResourcesGet.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_create.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_create.py
index 281c09558388..9c6e5439b7b2 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_create.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_create.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.begin_create_or_update(
@@ -34,12 +35,20 @@ def main():
workspace_name="myWorkspace",
parameters={
"location": "westus",
- "properties": {"managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG"},
+ "properties": {
+ "accessConnector": {
+ "id": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/adbrg/providers/Microsoft.Databricks/accessConnectors/myAccessConnector",
+ "identityType": "SystemAssigned",
+ },
+ "defaultCatalog": {"initialName": "", "initialType": "UnityCatalog"},
+ "defaultStorageFirewall": "Enabled",
+ "managedResourceGroupId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/myManagedRG",
+ },
},
).result()
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceCreate.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceCreate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_create_with_parameters.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_create_with_parameters.py
index 536e3d920eb5..bf7307c8aa60 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_create_with_parameters.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_create_with_parameters.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.begin_create_or_update(
@@ -35,12 +36,19 @@ def main():
parameters={
"location": "westus",
"properties": {
- "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG",
+ "accessConnector": {
+ "id": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/adbrg/providers/Microsoft.Databricks/accessConnectors/myAccessConnector",
+ "identityType": "UserAssigned",
+ "userAssignedIdentityId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myIdentity",
+ },
+ "defaultCatalog": {"initialName": "", "initialType": "HiveMetastore"},
+ "defaultStorageFirewall": "Enabled",
+ "managedResourceGroupId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/myManagedRG",
"parameters": {
"customPrivateSubnetName": {"value": "myPrivateSubnet"},
"customPublicSubnetName": {"value": "myPublicSubnet"},
"customVirtualNetworkId": {
- "value": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/myNetwork"
+ "value": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/myNetwork"
},
},
},
@@ -49,6 +57,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceCreateWithParameters.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceCreateWithParameters.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_delete.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_delete.py
index bf7af1a64123..69ded73b3abc 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_delete.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
client.workspaces.begin_delete(
@@ -35,6 +36,6 @@ def main():
).result()
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceDelete.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceDelete.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_delete_force_deletion.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_delete_force_deletion.py
new file mode 100644
index 000000000000..8823677e516c
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_delete_force_deletion.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-databricks
+# USAGE
+ python workspace_delete_force_deletion.py
+
+    Before running the sample, please set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = AzureDatabricksManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="11111111-1111-1111-1111-111111111111",
+ )
+
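+    # force_deletion was added in api-version 2025-03-01-preview; True is an illustrative value requesting forced deletion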
+    client.workspaces.begin_delete(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+        force_deletion=True,
+    ).result()
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceDeleteForceDeletion.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_network_peering_create_or_update.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_enhanced_security_compliance_create_or_update.py
similarity index 64%
rename from sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_network_peering_create_or_update.py
rename to sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_enhanced_security_compliance_create_or_update.py
index fb6e44baca00..caf7408c8f7b 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_network_peering_create_or_update.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_enhanced_security_compliance_create_or_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -14,7 +15,7 @@
pip install azure-identity
pip install azure-mgmt-databricks
# USAGE
- python workspace_virtual_network_peering_create_or_update.py
+ python workspace_enhanced_security_compliance_create_or_update.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -26,28 +27,27 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
- response = client.vnet_peering.begin_create_or_update(
+ response = client.workspaces.begin_create_or_update(
resource_group_name="rg",
workspace_name="myWorkspace",
- peering_name="vNetPeeringTest",
- virtual_network_peering_parameters={
+ parameters={
+ "location": "eastus2",
"properties": {
- "allowForwardedTraffic": False,
- "allowGatewayTransit": False,
- "allowVirtualNetworkAccess": True,
- "remoteVirtualNetwork": {
- "id": "/subscriptions/0140911e-1040-48da-8bc9-b99fb3dd88a6/resourceGroups/subramantest/providers/Microsoft.Network/virtualNetworks/subramanvnet"
+ "enhancedSecurityCompliance": {
+ "automaticClusterUpdate": {"value": "Enabled"},
+ "complianceSecurityProfile": {"complianceStandards": ["PCI_DSS", "HIPAA"], "value": "Enabled"},
+ "enhancedSecurityMonitoring": {"value": "Enabled"},
},
- "useRemoteGateways": False,
- }
+ "managedResourceGroupId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/myManagedRG",
+ },
},
).result()
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceVirtualNetworkPeeringCreateOrUpdate.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceEnhancedSecurityComplianceCreateOrUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_enhanced_security_compliance_get.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_enhanced_security_compliance_get.py
new file mode 100644
index 000000000000..651465a15261
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_enhanced_security_compliance_get.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-mgmt-databricks
+# USAGE
+ python workspace_enhanced_security_compliance_get.py
+
+    Before running the sample, please set the values of the client ID, tenant ID, and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = AzureDatabricksManagementClient(
+ credential=DefaultAzureCredential(),
+ subscription_id="11111111-1111-1111-1111-111111111111",
+ )
+
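+    # the returned workspace surfaces the new enhancedSecurityCompliance settings in its properties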
+ response = client.workspaces.get(
+ resource_group_name="rg",
+ workspace_name="myWorkspace",
+ )
+ print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceEnhancedSecurityComplianceGet.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_get.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_get.py
index eeff767db884..3e33d8f32407 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_get.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.get(
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceGet.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceGet.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_get_parameters.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_get_parameters.py
index f5635ad9eb64..ac85688c9d96 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_get_parameters.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_get_parameters.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.get(
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceGetParameters.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceGetParameters.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_create.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_create.py
index 36786e299bb7..9d3274ec3997 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_create.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_create.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.begin_create_or_update(
@@ -48,13 +49,13 @@ def main():
}
}
},
- "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG",
+ "managedResourceGroupId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/myManagedRG",
},
},
).result()
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceManagedDiskEncryptionCreate.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceManagedDiskEncryptionCreate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_get.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_get.py
index cdea6e0feb72..69969654ab56 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_get.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.get(
@@ -36,6 +37,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceManagedDiskEncryptionGet.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceManagedDiskEncryptionGet.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_update.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_update.py
index 0477f5f6bb62..141da7ba1745 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_update.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_managed_disk_encryption_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.begin_create_or_update(
@@ -48,7 +49,7 @@ def main():
}
}
},
- "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG",
+ "managedResourceGroupId": "/subscriptions/11111111-1111-1111-1111-111111111111/resourceGroups/myManagedRG",
},
"tags": {"mytag1": "myvalue1"},
},
@@ -56,6 +57,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceManagedDiskEncryptionUpdate.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceManagedDiskEncryptionUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_update.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_update.py
index 936fa87f41a5..861d2470a3b3 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_update.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_update.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.begin_update(
@@ -37,6 +38,6 @@ def main():
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceUpdate.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceUpdate.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_net_peering_get.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_net_peering_get.py
index ffe862a7571e..efedc030dac4 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_net_peering_get.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_net_peering_get.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,17 +27,17 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="0140911e-1040-48da-8bc9-b99fb3dd88a6/",
)
response = client.vnet_peering.get(
- resource_group_name="rg",
- workspace_name="myWorkspace",
- peering_name="vNetPeering",
+ resource_group_name="subramantest",
+ workspace_name="adbworkspace",
+ peering_name="vNetPeeringTest",
)
print(response)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceVirtualNetPeeringGet.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceVirtualNetPeeringGet.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_net_peering_list.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_net_peering_list.py
index b9b693f23db6..f2725d13eb56 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_net_peering_list.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_net_peering_list.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,17 +27,17 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="0140911e-1040-48da-8bc9-b99fb3dd88a6/",
)
response = client.vnet_peering.list_by_workspace(
- resource_group_name="rg",
- workspace_name="myWorkspace",
+ resource_group_name="subramantest",
+ workspace_name="adbworkspace",
)
for item in response:
print(item)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceVirtualNetPeeringList.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceVirtualNetPeeringList.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_network_peering_delete.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_network_peering_delete.py
index 299ae6938391..8cf68050e808 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_network_peering_delete.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspace_virtual_network_peering_delete.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,16 +27,16 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="0140911e-1040-48da-8bc9-b99fb3dd88a6/",
)
client.vnet_peering.begin_delete(
- resource_group_name="rg",
- workspace_name="myWorkspace",
- peering_name="vNetPeering",
+ resource_group_name="subramantest",
+ workspace_name="adbworkspace",
+ peering_name="vNetPeeringTest",
).result()
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspaceVirtualNetworkPeeringDelete.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspaceVirtualNetworkPeeringDelete.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspaces_list_by_resource_group.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspaces_list_by_resource_group.py
index 148a2b69d43c..aa544f367d59 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspaces_list_by_resource_group.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspaces_list_by_resource_group.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.list_by_resource_group(
@@ -36,6 +37,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspacesListByResourceGroup.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspacesListByResourceGroup.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspaces_list_by_subscription.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspaces_list_by_subscription.py
index ba81b1058a95..37b7a31e2ca5 100644
--- a/sdk/databricks/azure-mgmt-databricks/generated_samples/workspaces_list_by_subscription.py
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/workspaces_list_by_subscription.py
@@ -7,6 +7,7 @@
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
+
from azure.mgmt.databricks import AzureDatabricksManagementClient
"""
@@ -26,7 +27,7 @@
def main():
client = AzureDatabricksManagementClient(
credential=DefaultAzureCredential(),
- subscription_id="subid",
+ subscription_id="11111111-1111-1111-1111-111111111111",
)
response = client.workspaces.list_by_subscription()
@@ -34,6 +35,6 @@ def main():
print(item)
-# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/stable/2023-02-01/examples/WorkspacesListBySubscription.json
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2025-03-01-preview/examples/WorkspacesListBySubscription.json
if __name__ == "__main__":
main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/conftest.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/conftest.py
new file mode 100644
index 000000000000..6627880474b0
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/conftest.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+ test_proxy,
+ add_general_regex_sanitizer,
+ add_body_key_sanitizer,
+ add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, please avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+ azuredatabricksmanagement_subscription_id = os.environ.get(
+ "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+ )
+ azuredatabricksmanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+ azuredatabricksmanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+ azuredatabricksmanagement_client_secret = os.environ.get(
+ "AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000"
+ )
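+    # scrub the real values read above from recordings by replacing them with placeholder GUIDs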
+ add_general_regex_sanitizer(
+ regex=azuredatabricksmanagement_subscription_id, value="00000000-0000-0000-0000-000000000000"
+ )
+ add_general_regex_sanitizer(regex=azuredatabricksmanagement_tenant_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=azuredatabricksmanagement_client_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(
+ regex=azuredatabricksmanagement_client_secret, value="00000000-0000-0000-0000-000000000000"
+ )
+
+ add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+ add_header_regex_sanitizer(key="Cookie", value="cookie;")
+ add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_access_connectors_operations.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_access_connectors_operations.py
new file mode 100644
index 000000000000..5bd1eb034e46
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_access_connectors_operations.py
@@ -0,0 +1,120 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementAccessConnectorsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_access_connectors_get(self, resource_group):
+ response = self.client.access_connectors.get(
+ resource_group_name=resource_group.name,
+ connector_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_access_connectors_begin_delete(self, resource_group):
+ response = self.client.access_connectors.begin_delete(
+ resource_group_name=resource_group.name,
+ connector_name="str",
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns a final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_access_connectors_begin_create_or_update(self, resource_group):
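+        # the "str" values below are generated placeholders; replace them with real values before recording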
+ response = self.client.access_connectors.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ connector_name="str",
+ parameters={
+ "location": "str",
+ "id": "str",
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "name": "str",
+ "properties": {"provisioningState": "str", "referedBy": ["str"]},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ },
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns a final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_access_connectors_begin_update(self, resource_group):
+ response = self.client.access_connectors.begin_update(
+ resource_group_name=resource_group.name,
+ connector_name="str",
+ parameters={
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "tags": {"str": "str"},
+ },
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns a final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_access_connectors_list_by_resource_group(self, resource_group):
+ response = self.client.access_connectors.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2025-03-01-preview",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_access_connectors_list_by_subscription(self, resource_group):
+ response = self.client.access_connectors.list_by_subscription(
+ api_version="2025-03-01-preview",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_access_connectors_operations_async.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_access_connectors_operations_async.py
new file mode 100644
index 000000000000..ff74c3422551
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_access_connectors_operations_async.py
@@ -0,0 +1,126 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks.aio import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementAccessConnectorsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_access_connectors_get(self, resource_group):
+ response = await self.client.access_connectors.get(
+ resource_group_name=resource_group.name,
+ connector_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_access_connectors_begin_delete(self, resource_group):
+ response = await (
+ await self.client.access_connectors.begin_delete(
+ resource_group_name=resource_group.name,
+ connector_name="str",
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns a final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_access_connectors_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.access_connectors.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ connector_name="str",
+ parameters={
+ "location": "str",
+ "id": "str",
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "name": "str",
+ "properties": {"provisioningState": "str", "referedBy": ["str"]},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ },
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns a final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_access_connectors_begin_update(self, resource_group):
+ response = await (
+ await self.client.access_connectors.begin_update(
+ resource_group_name=resource_group.name,
+ connector_name="str",
+ parameters={
+ "identity": {
+ "type": "str",
+ "principalId": "str",
+ "tenantId": "str",
+ "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}},
+ },
+ "tags": {"str": "str"},
+ },
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns a final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_access_connectors_list_by_resource_group(self, resource_group):
+ response = self.client.access_connectors.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2025-03-01-preview",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_access_connectors_list_by_subscription(self, resource_group):
+ response = self.client.access_connectors.list_by_subscription(
+ api_version="2025-03-01-preview",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_operations.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_operations.py
new file mode 100644
index 000000000000..ff4f373de335
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_operations.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_operations_list(self, resource_group):
+ response = self.client.operations.list(
+ api_version="2025-03-01-preview",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_operations_async.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_operations_async.py
new file mode 100644
index 000000000000..fdfaddcf8bfc
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_operations_async.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks.aio import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_operations_list(self, resource_group):
+ response = self.client.operations.list(
+ api_version="2025-03-01-preview",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_outbound_network_dependencies_endpoints_operations.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_outbound_network_dependencies_endpoints_operations.py
new file mode 100644
index 000000000000..8d1b37baf33e
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_outbound_network_dependencies_endpoints_operations.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementOutboundNetworkDependenciesEndpointsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_outbound_network_dependencies_endpoints_list(self, resource_group):
+ response = self.client.outbound_network_dependencies_endpoints.list(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_outbound_network_dependencies_endpoints_operations_async.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_outbound_network_dependencies_endpoints_operations_async.py
new file mode 100644
index 000000000000..b3834301a3aa
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_outbound_network_dependencies_endpoints_operations_async.py
@@ -0,0 +1,32 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks.aio import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementOutboundNetworkDependenciesEndpointsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_outbound_network_dependencies_endpoints_list(self, resource_group):
+ response = await self.client.outbound_network_dependencies_endpoints.list(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_endpoint_connections_operations.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_endpoint_connections_operations.py
new file mode 100644
index 000000000000..96fbc3b7ec00
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_endpoint_connections_operations.py
@@ -0,0 +1,85 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_endpoint_connections_list(self, resource_group):
+ response = self.client.private_endpoint_connections.list(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_endpoint_connections_get(self, resource_group):
+ response = self.client.private_endpoint_connections.get(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ private_endpoint_connection_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_endpoint_connections_begin_create(self, resource_group):
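+        # the request body below is AutoRest-generated placeholder data; replace it with real values before recording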
+ response = self.client.private_endpoint_connections.begin_create(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ private_endpoint_connection_name="str",
+ private_endpoint_connection={
+ "properties": {
+ "privateLinkServiceConnectionState": {
+ "status": "str",
+ "actionsRequired": "str",
+ "description": "str",
+ },
+ "groupIds": ["str"],
+ "privateEndpoint": {"id": "str"},
+ "provisioningState": "str",
+ },
+ "id": "str",
+ "name": "str",
+ "type": "str",
+ },
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_endpoint_connections_begin_delete(self, resource_group):
+ response = self.client.private_endpoint_connections.begin_delete(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ private_endpoint_connection_name="str",
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_endpoint_connections_operations_async.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_endpoint_connections_operations_async.py
new file mode 100644
index 000000000000..bc3d65f3d877
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_endpoint_connections_operations_async.py
@@ -0,0 +1,92 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks.aio import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_endpoint_connections_list(self, resource_group):
+ response = self.client.private_endpoint_connections.list(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_endpoint_connections_get(self, resource_group):
+ response = await self.client.private_endpoint_connections.get(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ private_endpoint_connection_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_endpoint_connections_begin_create(self, resource_group):
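+        # the inner 'await' sends the request and returns an AsyncLROPoller;
+        # the outer 'await' on '.result()' waits for the operation to complete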
+ response = await (
+ await self.client.private_endpoint_connections.begin_create(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ private_endpoint_connection_name="str",
+ private_endpoint_connection={
+ "properties": {
+ "privateLinkServiceConnectionState": {
+ "status": "str",
+ "actionsRequired": "str",
+ "description": "str",
+ },
+ "groupIds": ["str"],
+ "privateEndpoint": {"id": "str"},
+ "provisioningState": "str",
+ },
+ "id": "str",
+ "name": "str",
+ "type": "str",
+ },
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_endpoint_connections_begin_delete(self, resource_group):
+ response = await (
+ await self.client.private_endpoint_connections.begin_delete(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ private_endpoint_connection_name="str",
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_link_resources_operations.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_link_resources_operations.py
new file mode 100644
index 000000000000..c2ae41f11224
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_link_resources_operations.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_link_resources_list(self, resource_group):
+ response = self.client.private_link_resources.list(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_private_link_resources_get(self, resource_group):
+ response = self.client.private_link_resources.get(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ group_id="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_link_resources_operations_async.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_link_resources_operations_async.py
new file mode 100644
index 000000000000..2770c3b71c73
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_private_link_resources_operations_async.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks.aio import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_link_resources_list(self, resource_group):
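+        # 'list' returns an AsyncItemPaged, so the call is not awaited; items are consumed with 'async for'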
+ response = self.client.private_link_resources.list(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_private_link_resources_get(self, resource_group):
+ response = await self.client.private_link_resources.get(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ group_id="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_vnet_peering_operations.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_vnet_peering_operations.py
new file mode 100644
index 000000000000..9abfe802c3f5
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_vnet_peering_operations.py
@@ -0,0 +1,86 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementVNetPeeringOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_vnet_peering_get(self, resource_group):
+ response = self.client.vnet_peering.get(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ peering_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_vnet_peering_begin_delete(self, resource_group):
+ response = self.client.vnet_peering.begin_delete(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ peering_name="str",
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_vnet_peering_begin_create_or_update(self, resource_group):
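+        # 'bool' below is a generated placeholder for a boolean field; substitute True or False before running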
+ response = self.client.vnet_peering.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ peering_name="str",
+ virtual_network_peering_parameters={
+ "remoteVirtualNetwork": {"id": "str"},
+ "allowForwardedTraffic": bool,
+ "allowGatewayTransit": bool,
+ "allowVirtualNetworkAccess": bool,
+ "databricksAddressSpace": {"addressPrefixes": ["str"]},
+ "databricksVirtualNetwork": {"id": "str"},
+ "id": "str",
+ "name": "str",
+ "peeringState": "str",
+ "provisioningState": "str",
+ "remoteAddressSpace": {"addressPrefixes": ["str"]},
+ "type": "str",
+ "useRemoteGateways": bool,
+ },
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_vnet_peering_list_by_workspace(self, resource_group):
+ response = self.client.vnet_peering.list_by_workspace(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_vnet_peering_operations_async.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_vnet_peering_operations_async.py
new file mode 100644
index 000000000000..f1469713e119
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_vnet_peering_operations_async.py
@@ -0,0 +1,90 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks.aio import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementVNetPeeringOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_vnet_peering_get(self, resource_group):
+ response = await self.client.vnet_peering.get(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ peering_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_vnet_peering_begin_delete(self, resource_group):
+ response = await (
+ await self.client.vnet_peering.begin_delete(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ peering_name="str",
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_vnet_peering_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.vnet_peering.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ peering_name="str",
+ virtual_network_peering_parameters={
+ "remoteVirtualNetwork": {"id": "str"},
+ "allowForwardedTraffic": bool,
+ "allowGatewayTransit": bool,
+ "allowVirtualNetworkAccess": bool,
+ "databricksAddressSpace": {"addressPrefixes": ["str"]},
+ "databricksVirtualNetwork": {"id": "str"},
+ "id": "str",
+ "name": "str",
+ "peeringState": "str",
+ "provisioningState": "str",
+ "remoteAddressSpace": {"addressPrefixes": ["str"]},
+ "type": "str",
+ "useRemoteGateways": bool,
+ },
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_vnet_peering_list_by_workspace(self, resource_group):
+ response = self.client.vnet_peering.list_by_workspace(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_workspaces_operations.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_workspaces_operations.py
new file mode 100644
index 000000000000..c05e0d147e1f
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_workspaces_operations.py
@@ -0,0 +1,180 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementWorkspacesOperations(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workspaces_get(self, resource_group):
+ response = self.client.workspaces.get(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workspaces_begin_delete(self, resource_group):
+ response = self.client.workspaces.begin_delete(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workspaces_begin_create_or_update(self, resource_group):
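+        # the generated payload below exercises the full Workspace model; trim it to the fields your scenario needs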
+ response = self.client.workspaces.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ parameters={
+ "location": "str",
+ "managedResourceGroupId": "str",
+ "accessConnector": {"id": "str", "identityType": "str", "userAssignedIdentityId": "str"},
+ "authorizations": [{"principalId": "str", "roleDefinitionId": "str"}],
+ "createdBy": {"applicationId": "str", "oid": "str", "puid": "str"},
+ "createdDateTime": "2020-02-20 00:00:00",
+ "defaultCatalog": {"initialName": "str", "initialType": "HiveMetastore"},
+ "defaultStorageFirewall": "str",
+ "diskEncryptionSetId": "str",
+ "encryption": {
+ "entities": {
+ "managedDisk": {
+ "keySource": "str",
+ "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"},
+ "rotationToLatestKeyVersionEnabled": bool,
+ },
+ "managedServices": {
+ "keySource": "str",
+ "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"},
+ },
+ }
+ },
+ "enhancedSecurityCompliance": {
+ "automaticClusterUpdate": {"value": "str"},
+ "complianceSecurityProfile": {"complianceStandards": ["str"], "value": "str"},
+ "enhancedSecurityMonitoring": {"value": "str"},
+ },
+ "id": "str",
+ "isUcEnabled": bool,
+ "managedDiskIdentity": {"principalId": "str", "tenantId": "str", "type": "str"},
+ "name": "str",
+ "parameters": {
+ "amlWorkspaceId": {"value": "str", "type": "str"},
+ "customPrivateSubnetName": {"value": "str", "type": "str"},
+ "customPublicSubnetName": {"value": "str", "type": "str"},
+ "customVirtualNetworkId": {"value": "str", "type": "str"},
+ "enableNoPublicIp": {"value": bool, "type": "str"},
+ "encryption": {
+ "type": "str",
+ "value": {"KeyName": "str", "keySource": "Default", "keyvaulturi": "str", "keyversion": "str"},
+ },
+ "loadBalancerBackendPoolName": {"value": "str", "type": "str"},
+ "loadBalancerId": {"value": "str", "type": "str"},
+ "natGatewayName": {"value": "str", "type": "str"},
+ "prepareEncryption": {"value": bool, "type": "str"},
+ "publicIpName": {"value": "str", "type": "str"},
+ "requireInfrastructureEncryption": {"value": bool, "type": "str"},
+ "resourceTags": {"value": {}, "type": "str"},
+ "storageAccountName": {"value": "str", "type": "str"},
+ "storageAccountSkuName": {"value": "str", "type": "str"},
+ "vnetAddressPrefix": {"value": "str", "type": "str"},
+ },
+ "privateEndpointConnections": [
+ {
+ "properties": {
+ "privateLinkServiceConnectionState": {
+ "status": "str",
+ "actionsRequired": "str",
+ "description": "str",
+ },
+ "groupIds": ["str"],
+ "privateEndpoint": {"id": "str"},
+ "provisioningState": "str",
+ },
+ "id": "str",
+ "name": "str",
+ "type": "str",
+ }
+ ],
+ "provisioningState": "str",
+ "publicNetworkAccess": "str",
+ "requiredNsgRules": "str",
+ "sku": {"name": "str", "tier": "str"},
+ "storageAccountIdentity": {"principalId": "str", "tenantId": "str", "type": "str"},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "uiDefinitionUri": "str",
+ "updatedBy": {"applicationId": "str", "oid": "str", "puid": "str"},
+ "workspaceId": "str",
+ "workspaceUrl": "str",
+ },
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workspaces_begin_update(self, resource_group):
+ response = self.client.workspaces.begin_update(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ parameters={"tags": {"str": "str"}},
+ api_version="2025-03-01-preview",
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workspaces_list_by_resource_group(self, resource_group):
+ response = self.client.workspaces.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2025-03-01-preview",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy
+ def test_workspaces_list_by_subscription(self, resource_group):
+ response = self.client.workspaces.list_by_subscription(
+ api_version="2025-03-01-preview",
+ )
+ result = [r for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_workspaces_operations_async.py b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_workspaces_operations_async.py
new file mode 100644
index 000000000000..99a0ed836152
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_tests/test_azure_databricks_management_workspaces_operations_async.py
@@ -0,0 +1,191 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from azure.mgmt.databricks.aio import AzureDatabricksManagementClient
+
+from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer
+from devtools_testutils.aio import recorded_by_proxy_async
+
+AZURE_LOCATION = "eastus"
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before running it")
+class TestAzureDatabricksManagementWorkspacesOperationsAsync(AzureMgmtRecordedTestCase):
+ def setup_method(self, method):
+ self.client = self.create_mgmt_client(AzureDatabricksManagementClient, is_async=True)
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workspaces_get(self, resource_group):
+ response = await self.client.workspaces.get(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workspaces_begin_delete(self, resource_group):
+ response = await (
+ await self.client.workspaces.begin_delete(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workspaces_begin_create_or_update(self, resource_group):
+ response = await (
+ await self.client.workspaces.begin_create_or_update(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ parameters={
+ "location": "str",
+ "managedResourceGroupId": "str",
+ "accessConnector": {"id": "str", "identityType": "str", "userAssignedIdentityId": "str"},
+ "authorizations": [{"principalId": "str", "roleDefinitionId": "str"}],
+ "createdBy": {"applicationId": "str", "oid": "str", "puid": "str"},
+ "createdDateTime": "2020-02-20 00:00:00",
+ "defaultCatalog": {"initialName": "str", "initialType": "HiveMetastore"},
+ "defaultStorageFirewall": "str",
+ "diskEncryptionSetId": "str",
+ "encryption": {
+ "entities": {
+ "managedDisk": {
+ "keySource": "str",
+ "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"},
+ "rotationToLatestKeyVersionEnabled": bool,
+ },
+ "managedServices": {
+ "keySource": "str",
+ "keyVaultProperties": {"keyName": "str", "keyVaultUri": "str", "keyVersion": "str"},
+ },
+ }
+ },
+ "enhancedSecurityCompliance": {
+ "automaticClusterUpdate": {"value": "str"},
+ "complianceSecurityProfile": {"complianceStandards": ["str"], "value": "str"},
+ "enhancedSecurityMonitoring": {"value": "str"},
+ },
+ "id": "str",
+ "isUcEnabled": bool,
+ "managedDiskIdentity": {"principalId": "str", "tenantId": "str", "type": "str"},
+ "name": "str",
+ "parameters": {
+ "amlWorkspaceId": {"value": "str", "type": "str"},
+ "customPrivateSubnetName": {"value": "str", "type": "str"},
+ "customPublicSubnetName": {"value": "str", "type": "str"},
+ "customVirtualNetworkId": {"value": "str", "type": "str"},
+ "enableNoPublicIp": {"value": bool, "type": "str"},
+ "encryption": {
+ "type": "str",
+ "value": {
+ "KeyName": "str",
+ "keySource": "Default",
+ "keyvaulturi": "str",
+ "keyversion": "str",
+ },
+ },
+ "loadBalancerBackendPoolName": {"value": "str", "type": "str"},
+ "loadBalancerId": {"value": "str", "type": "str"},
+ "natGatewayName": {"value": "str", "type": "str"},
+ "prepareEncryption": {"value": bool, "type": "str"},
+ "publicIpName": {"value": "str", "type": "str"},
+ "requireInfrastructureEncryption": {"value": bool, "type": "str"},
+ "resourceTags": {"value": {}, "type": "str"},
+ "storageAccountName": {"value": "str", "type": "str"},
+ "storageAccountSkuName": {"value": "str", "type": "str"},
+ "vnetAddressPrefix": {"value": "str", "type": "str"},
+ },
+ "privateEndpointConnections": [
+ {
+ "properties": {
+ "privateLinkServiceConnectionState": {
+ "status": "str",
+ "actionsRequired": "str",
+ "description": "str",
+ },
+ "groupIds": ["str"],
+ "privateEndpoint": {"id": "str"},
+ "provisioningState": "str",
+ },
+ "id": "str",
+ "name": "str",
+ "type": "str",
+ }
+ ],
+ "provisioningState": "str",
+ "publicNetworkAccess": "str",
+ "requiredNsgRules": "str",
+ "sku": {"name": "str", "tier": "str"},
+ "storageAccountIdentity": {"principalId": "str", "tenantId": "str", "type": "str"},
+ "systemData": {
+ "createdAt": "2020-02-20 00:00:00",
+ "createdBy": "str",
+ "createdByType": "str",
+ "lastModifiedAt": "2020-02-20 00:00:00",
+ "lastModifiedBy": "str",
+ "lastModifiedByType": "str",
+ },
+ "tags": {"str": "str"},
+ "type": "str",
+ "uiDefinitionUri": "str",
+ "updatedBy": {"applicationId": "str", "oid": "str", "puid": "str"},
+ "workspaceId": "str",
+ "workspaceUrl": "str",
+ },
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workspaces_begin_update(self, resource_group):
+ response = await (
+ await self.client.workspaces.begin_update(
+ resource_group_name=resource_group.name,
+ workspace_name="str",
+ parameters={"tags": {"str": "str"}},
+ api_version="2025-03-01-preview",
+ )
+        ).result()  # call '.result()' to poll until the service returns the final result
+
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workspaces_list_by_resource_group(self, resource_group):
+ response = self.client.workspaces.list_by_resource_group(
+ resource_group_name=resource_group.name,
+ api_version="2025-03-01-preview",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
+
+ @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
+ @recorded_by_proxy_async
+ async def test_workspaces_list_by_subscription(self, resource_group):
+ response = self.client.workspaces.list_by_subscription(
+ api_version="2025-03-01-preview",
+ )
+ result = [r async for r in response]
+        # please add your own check logic here
+ # ...
diff --git a/sdk/databricks/azure-mgmt-databricks/sdk_packaging.toml b/sdk/databricks/azure-mgmt-databricks/sdk_packaging.toml
index d0a9e6621d47..950538bf7cf8 100644
--- a/sdk/databricks/azure-mgmt-databricks/sdk_packaging.toml
+++ b/sdk/databricks/azure-mgmt-databricks/sdk_packaging.toml
@@ -3,6 +3,6 @@ package_name = "azure-mgmt-databricks"
package_nspkg = "azure-mgmt-nspkg"
package_pprint_name = "Data Bricks Management"
package_doc_id = ""
-is_stable = true
+is_stable = false
is_arm = true
title = "AzureDatabricksManagementClient"
diff --git a/sdk/databricks/azure-mgmt-databricks/setup.py b/sdk/databricks/azure-mgmt-databricks/setup.py
index 7b2212e87e3f..07786058ea80 100644
--- a/sdk/databricks/azure-mgmt-databricks/setup.py
+++ b/sdk/databricks/azure-mgmt-databricks/setup.py
@@ -49,15 +49,15 @@
url="https://github.com/Azure/azure-sdk-for-python",
keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product
classifiers=[
- "Development Status :: 5 - Production/Stable",
+ "Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"License :: OSI Approved :: MIT License",
],
zip_safe=False,
@@ -74,10 +74,10 @@
"pytyped": ["py.typed"],
},
install_requires=[
- "isodate<1.0.0,>=0.6.1",
- "azure-common~=1.1",
- "azure-mgmt-core>=1.3.2,<2.0.0",
- "typing-extensions>=4.3.0; python_version<'3.8.0'",
+ "isodate>=0.6.1",
+ "typing-extensions>=4.6.0",
+ "azure-common>=1.1",
+ "azure-mgmt-core>=1.3.2",
],
- python_requires=">=3.7",
+ python_requires=">=3.8",
)