From 96ae46aad95c5c4818a071159ec0ada2934a12d3 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 22 Jul 2025 13:12:40 +0530 Subject: [PATCH] TEST DO NOT MERGE --- .codegen/_openapi_sha | 2 +- .gitattributes | 1 + databricks/sdk/__init__.py | 31 +- databricks/sdk/service/catalog.py | 494 +++++++++++++++++++- databricks/sdk/service/compute.py | 448 ++++++++++++++++++ databricks/sdk/service/dashboards.py | 512 +++++++++++++++++++++ databricks/sdk/service/database.py | 279 ++++++++++- databricks/sdk/service/jobs.py | 101 +++- databricks/sdk/service/ml.py | 44 ++ databricks/sdk/service/pipelines.py | 54 +++ databricks/sdk/service/qualitymonitorv2.py | 14 + databricks/sdk/service/serving.py | 8 + databricks/sdk/service/settings.py | 8 +- databricks/sdk/service/settingsv2.py | 236 ++++++++++ databricks/sdk/service/sharing.py | 34 +- databricks/sdk/service/vectorsearch.py | 85 +++- 16 files changed, 2338 insertions(+), 13 deletions(-) create mode 100755 databricks/sdk/service/settingsv2.py diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 62eb1dbba..3361d4966 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -69902d1abe35bd9e78e0231927bf14d11b383a16 \ No newline at end of file +file:/Users/tanmay.rustagi/emu/universe/bazel-bin/openapi/all-internal.json \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 3bfcfa2e1..220c4be2e 100755 --- a/.gitattributes +++ b/.gitattributes @@ -20,6 +20,7 @@ databricks/sdk/service/provisioning.py linguist-generated=true databricks/sdk/service/qualitymonitorv2.py linguist-generated=true databricks/sdk/service/serving.py linguist-generated=true databricks/sdk/service/settings.py linguist-generated=true +databricks/sdk/service/settingsv2.py linguist-generated=true databricks/sdk/service/sharing.py linguist-generated=true databricks/sdk/service/sql.py linguist-generated=true databricks/sdk/service/vectorsearch.py linguist-generated=true diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index deb1f7785..c558bc11e 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -32,6 +32,7 @@ from databricks.sdk.service import qualitymonitorv2 as pkg_qualitymonitorv2 from databricks.sdk.service import serving as pkg_serving from databricks.sdk.service import settings as pkg_settings +from databricks.sdk.service import settingsv2 as pkg_settingsv2 from databricks.sdk.service import sharing as pkg_sharing from databricks.sdk.service import sql as pkg_sql from databricks.sdk.service import vectorsearch as pkg_vectorsearch @@ -53,6 +54,7 @@ ModelVersionsAPI, OnlineTablesAPI, QualityMonitorsAPI, RegisteredModelsAPI, + RequestForAccessAPI, ResourceQuotasAPI, SchemasAPI, StorageCredentialsAPI, SystemSchemasAPI, @@ -70,7 +72,8 @@ PolicyComplianceForClustersAPI, PolicyFamiliesAPI) from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI, - LakeviewEmbeddedAPI) + LakeviewEmbeddedAPI, + QueryExecutionAPI) from databricks.sdk.service.database import DatabaseAPI from databricks.sdk.service.files import DbfsAPI, FilesAPI from databricks.sdk.service.iam import (AccessControlAPI, @@ -125,6 +128,8 @@ RestrictWorkspaceAdminsAPI, SettingsAPI, SqlResultsDownloadAPI, TokenManagementAPI, TokensAPI, WorkspaceConfAPI, WorkspaceNetworkConfigurationAPI) +from databricks.sdk.service.settingsv2 import (AccountSettingsV2API, + WorkspaceSettingsV2API) from databricks.sdk.service.sharing import (ProvidersAPI, RecipientActivationAPI, RecipientFederationPoliciesAPI, @@ -313,6 +318,7 @@ def 
__init__( self._quality_monitors = pkg_catalog.QualityMonitorsAPI(self._api_client) self._queries = pkg_sql.QueriesAPI(self._api_client) self._queries_legacy = pkg_sql.QueriesLegacyAPI(self._api_client) + self._query_execution = pkg_dashboards.QueryExecutionAPI(self._api_client) self._query_history = pkg_sql.QueryHistoryAPI(self._api_client) self._query_visualizations = pkg_sql.QueryVisualizationsAPI(self._api_client) self._query_visualizations_legacy = pkg_sql.QueryVisualizationsLegacyAPI(self._api_client) @@ -322,6 +328,7 @@ def __init__( self._redash_config = pkg_sql.RedashConfigAPI(self._api_client) self._registered_models = pkg_catalog.RegisteredModelsAPI(self._api_client) self._repos = pkg_workspace.ReposAPI(self._api_client) + self._request_for_access = pkg_catalog.RequestForAccessAPI(self._api_client) self._resource_quotas = pkg_catalog.ResourceQuotasAPI(self._api_client) self._schemas = pkg_catalog.SchemasAPI(self._api_client) self._secrets = pkg_workspace.SecretsAPI(self._api_client) @@ -352,6 +359,7 @@ def __init__( self._workspace = WorkspaceExt(self._api_client) self._workspace_bindings = pkg_catalog.WorkspaceBindingsAPI(self._api_client) self._workspace_conf = pkg_settings.WorkspaceConfAPI(self._api_client) + self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client) self._forecasting = pkg_ml.ForecastingAPI(self._api_client) @property @@ -731,6 +739,11 @@ def queries_legacy(self) -> pkg_sql.QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions.""" return self._queries_legacy + @property + def query_execution(self) -> pkg_dashboards.QueryExecutionAPI: + """Query execution APIs for AI / BI Dashboards.""" + return self._query_execution + @property def query_history(self) -> pkg_sql.QueryHistoryAPI: """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.""" @@ -776,6 +789,11 @@ def repos(self) -> pkg_workspace.ReposAPI: """The Repos API allows users to manage their git repos.""" return self._repos + @property + def request_for_access(self) -> pkg_catalog.RequestForAccessAPI: + """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables.""" + return self._request_for_access + @property def resource_quotas(self) -> pkg_catalog.ResourceQuotasAPI: """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.""" @@ -901,6 +919,11 @@ def workspace_conf(self) -> pkg_settings.WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" return self._workspace_conf + @property + def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API: + """APIs to manage workspace level settings.""" + return self._workspace_settings_v2 + @property def forecasting(self) -> pkg_ml.ForecastingAPI: """The Forecasting API allows you to create and get serverless forecasting experiments.""" @@ -1002,6 +1025,7 @@ def __init__( self._service_principal_secrets = pkg_oauth2.ServicePrincipalSecretsAPI(self._api_client) self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client) self._settings = pkg_settings.AccountSettingsAPI(self._api_client) + self._settings_v2 = pkg_settingsv2.AccountSettingsV2API(self._api_client) self._storage = pkg_provisioning.StorageAPI(self._api_client) self._storage_credentials = pkg_catalog.AccountStorageCredentialsAPI(self._api_client) self._usage_dashboards = 
pkg_billing.UsageDashboardsAPI(self._api_client) @@ -1130,6 +1154,11 @@ def settings(self) -> pkg_settings.AccountSettingsAPI: """Accounts Settings API allows users to manage settings at the account level.""" return self._settings + @property + def settings_v2(self) -> pkg_settingsv2.AccountSettingsV2API: + """APIs to manage account level settings.""" + return self._settings_v2 + @property def storage(self) -> pkg_provisioning.StorageAPI: """These APIs manage storage configurations for this workspace.""" diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 3fe7b66bb..8c4a23ad2 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -19,6 +19,50 @@ # all definitions in this file are in alphabetical order +@dataclass +class AccessRequestDestinations: + destinations: List[NotificationDestination] + """The access request destinations for the securable.""" + + securable: Securable + """The securable for which the access request destinations are being retrieved.""" + + are_any_destinations_hidden: Optional[bool] = None + """Indicates whether any destinations are hidden from the caller due to a lack of permissions. This + value is true if the caller does not have permission to see all destinations.""" + + def as_dict(self) -> dict: + """Serializes the AccessRequestDestinations into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.are_any_destinations_hidden is not None: + body["are_any_destinations_hidden"] = self.are_any_destinations_hidden + if self.destinations: + body["destinations"] = [v.as_dict() for v in self.destinations] + if self.securable: + body["securable"] = self.securable.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccessRequestDestinations into a shallow dictionary of its immediate attributes.""" + body = {} + if self.are_any_destinations_hidden is not None: + body["are_any_destinations_hidden"] = self.are_any_destinations_hidden + if self.destinations: + body["destinations"] = self.destinations + if self.securable: + body["securable"] = self.securable + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccessRequestDestinations: + """Deserializes the AccessRequestDestinations from a dictionary.""" + return cls( + are_any_destinations_hidden=d.get("are_any_destinations_hidden", None), + destinations=_repeated_dict(d, "destinations", NotificationDestination), + securable=_from_dict(d, "securable", Securable), + ) + + @dataclass class AccountsMetastoreAssignment: metastore_assignment: Optional[MetastoreAssignment] = None @@ -706,6 +750,31 @@ def from_dict(cls, d: Dict[str, Any]) -> AzureUserDelegationSas: return cls(sas_token=d.get("sas_token", None)) +@dataclass +class BatchCreateAccessRequestsResponse: + responses: Optional[List[CreateAccessRequestResponse]] = None + """The access request destinations for each securable object the principal requested.""" + + def as_dict(self) -> dict: + """Serializes the BatchCreateAccessRequestsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.responses: + body["responses"] = [v.as_dict() for v in self.responses] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the BatchCreateAccessRequestsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.responses: + body["responses"] = self.responses + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> 
BatchCreateAccessRequestsResponse: + """Deserializes the BatchCreateAccessRequestsResponse from a dictionary.""" + return cls(responses=_repeated_dict(d, "responses", CreateAccessRequestResponse)) + + @dataclass class CancelRefreshResponse: def as_dict(self) -> dict: @@ -1467,6 +1536,94 @@ def from_dict(cls, d: Dict[str, Any]) -> ContinuousUpdateStatus: ) +@dataclass +class CreateAccessRequest: + behalf_of: Optional[Principal] = None + """Optional. The principal this request is for. Empty `behalf_of` defaults to the requester's + identity.""" + + comment: Optional[str] = None + """Optional. Comment associated with the request.""" + + securable_permissions: Optional[List[SecurablePermissions]] = None + """List of securables and their corresponding requested UC privileges.""" + + def as_dict(self) -> dict: + """Serializes the CreateAccessRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.behalf_of: + body["behalf_of"] = self.behalf_of.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.securable_permissions: + body["securable_permissions"] = [v.as_dict() for v in self.securable_permissions] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateAccessRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.behalf_of: + body["behalf_of"] = self.behalf_of + if self.comment is not None: + body["comment"] = self.comment + if self.securable_permissions: + body["securable_permissions"] = self.securable_permissions + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateAccessRequest: + """Deserializes the CreateAccessRequest from a dictionary.""" + return cls( + behalf_of=_from_dict(d, "behalf_of", Principal), + comment=d.get("comment", None), + securable_permissions=_repeated_dict(d, "securable_permissions", SecurablePermissions), + ) + + +@dataclass +class CreateAccessRequestResponse: + are_any_destinations_hidden: Optional[bool] = None + """Indicates whether any destinations are hidden from the caller due to a lack of permissions. 
This + value is true if the caller does not have permission to see all destinations.""" + + behalf_of: Optional[Principal] = None + """The principal the request was made on behalf of.""" + + destinations: Optional[List[SecurableNotificationDestinations]] = None + """The access request destinations for all the securables the principal requested.""" + + def as_dict(self) -> dict: + """Serializes the CreateAccessRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.are_any_destinations_hidden is not None: + body["are_any_destinations_hidden"] = self.are_any_destinations_hidden + if self.behalf_of: + body["behalf_of"] = self.behalf_of.as_dict() + if self.destinations: + body["destinations"] = [v.as_dict() for v in self.destinations] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateAccessRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.are_any_destinations_hidden is not None: + body["are_any_destinations_hidden"] = self.are_any_destinations_hidden + if self.behalf_of: + body["behalf_of"] = self.behalf_of + if self.destinations: + body["destinations"] = self.destinations + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateAccessRequestResponse: + """Deserializes the CreateAccessRequestResponse from a dictionary.""" + return cls( + are_any_destinations_hidden=d.get("are_any_destinations_hidden", None), + behalf_of=_from_dict(d, "behalf_of", Principal), + destinations=_repeated_dict(d, "destinations", SecurableNotificationDestinations), + ) + + @dataclass class CreateFunction: name: str @@ -2550,6 +2707,15 @@ def from_dict(cls, d: Dict[str, Any]) -> DependencyList: return cls(dependencies=_repeated_dict(d, "dependencies", Dependency)) +class DestinationType(Enum): + + EMAIL = "EMAIL" + GENERIC_WEBHOOK = "GENERIC_WEBHOOK" + MICROSOFT_TEAMS = "MICROSOFT_TEAMS" + SLACK = "SLACK" + URL = "URL" + + @dataclass class DisableResponse: def as_dict(self) -> dict: @@ -6509,6 +6675,50 @@ def from_dict(cls, d: Dict[str, Any]) -> NamedTableConstraint: return cls(name=d.get("name", None)) +@dataclass +class NotificationDestination: + destination_id: Optional[str] = None + """The unique identifier for the destination.""" + + destination_type: Optional[DestinationType] = None + """The type of the destination.""" + + special_destination: Optional[SpecialDestination] = None + """This field is used to denote whether the destination is the email of the owner of the securable + object.""" + + def as_dict(self) -> dict: + """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.destination_id is not None: + body["destination_id"] = self.destination_id + if self.destination_type is not None: + body["destination_type"] = self.destination_type.value + if self.special_destination is not None: + body["special_destination"] = self.special_destination.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes.""" + body = {} + if self.destination_id is not None: + body["destination_id"] = self.destination_id + if self.destination_type is not None: + body["destination_type"] = self.destination_type + if self.special_destination is not None: + body["special_destination"] = self.special_destination + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NotificationDestination: + """Deserializes the 
NotificationDestination from a dictionary.""" + return cls( + destination_id=d.get("destination_id", None), + destination_type=_enum(d, "destination_type", DestinationType), + special_destination=_enum(d, "special_destination", SpecialDestination), + ) + + @dataclass class OnlineTable: """Online Table information.""" @@ -6944,6 +7154,15 @@ class PermissionsChange: """The principal whose privileges we are changing. Only one of principal or principal_id should be specified, never both at the same time.""" + principal_id: Optional[int] = None + """An opaque internal ID that identifies the principal whose privileges should be removed. + + This field is intended for removing privileges associated with a deleted user. When set, only + the entries specified in the remove field are processed; any entries in the add field will be + rejected. + + Only one of principal or principal_id should be specified, never both at the same time.""" + remove: Optional[List[Privilege]] = None """The set of privileges to remove.""" @@ -6954,6 +7173,8 @@ def as_dict(self) -> dict: body["add"] = [v.value for v in self.add] if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = [v.value for v in self.remove] return body @@ -6965,6 +7186,8 @@ def as_shallow_dict(self) -> dict: body["add"] = self.add if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = self.remove return body @@ -6975,6 +7198,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: return cls( add=_repeated_enum(d, "add", Privilege), principal=d.get("principal", None), + principal_id=d.get("principal_id", None), remove=_repeated_enum(d, "remove", Privilege), ) @@ -7092,6 +7316,44 @@ def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: ) +@dataclass +class Principal: + id: Optional[str] = None + """Databricks user, group or service principal ID.""" + + principal_type: Optional[PrincipalType] = None + + def as_dict(self) -> dict: + """Serializes the Principal into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: + body["id"] = self.id + if self.principal_type is not None: + body["principal_type"] = self.principal_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Principal into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: + body["id"] = self.id + if self.principal_type is not None: + body["principal_type"] = self.principal_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Principal: + """Deserializes the Principal from a dictionary.""" + return cls(id=d.get("id", None), principal_type=_enum(d, "principal_type", PrincipalType)) + + +class PrincipalType(Enum): + + GROUP_PRINCIPAL = "GROUP_PRINCIPAL" + SERVICE_PRINCIPAL = "SERVICE_PRINCIPAL" + USER_PRINCIPAL = "USER_PRINCIPAL" + + class Privilege(Enum): ACCESS = "ACCESS" @@ -7151,6 +7413,10 @@ class PrivilegeAssignment: """The principal (user email address or group name). For deleted principals, `principal` is empty while `principal_id` is populated.""" + principal_id: Optional[int] = None + """Unique identifier of the principal. 
For active principals, both `principal` and `principal_id` + are present.""" + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -7159,6 +7425,8 @@ def as_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = [v.value for v in self.privileges] return body @@ -7168,6 +7436,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = self.privileges return body @@ -7175,7 +7445,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls( + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + privileges=_repeated_enum(d, "privileges", Privilege), + ) @dataclass @@ -7722,6 +7996,53 @@ def from_dict(cls, d: Dict[str, Any]) -> SchemaInfo: ) +@dataclass +class Securable: + """Generic definition of a securable, which is uniquely defined in a metastore by its type and full + name.""" + + full_name: Optional[str] = None + """Required. The full name of the catalog/schema/table. Optional if resource_name is present.""" + + provider_share: Optional[str] = None + """Optional. The name of the Share object that contains the securable when the securable is getting + shared in D2D Delta Sharing.s""" + + type: Optional[SecurableType] = None + """Required. The type of securable (catalog/schema/table). 
Optional if resource_name is present.""" + + def as_dict(self) -> dict: + """Serializes the Securable into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.full_name is not None: + body["full_name"] = self.full_name + if self.provider_share is not None: + body["provider_share"] = self.provider_share + if self.type is not None: + body["type"] = self.type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Securable into a shallow dictionary of its immediate attributes.""" + body = {} + if self.full_name is not None: + body["full_name"] = self.full_name + if self.provider_share is not None: + body["provider_share"] = self.provider_share + if self.type is not None: + body["type"] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Securable: + """Deserializes the Securable from a dictionary.""" + return cls( + full_name=d.get("full_name", None), + provider_share=d.get("provider_share", None), + type=_enum(d, "type", SecurableType), + ) + + class SecurableKind(Enum): TABLE_DB_STORAGE = "TABLE_DB_STORAGE" @@ -7853,6 +8174,73 @@ def from_dict(cls, d: Dict[str, Any]) -> SecurableKindManifest: ) +@dataclass +class SecurableNotificationDestinations: + notification_destinations: Optional[NotificationDestination] = None + """The access request destinations for the securable.""" + + securable: Optional[Securable] = None + """The securable for which the access request destinations are being retrieved.""" + + def as_dict(self) -> dict: + """Serializes the SecurableNotificationDestinations into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.notification_destinations: + body["notification_destinations"] = self.notification_destinations.as_dict() + if self.securable: + body["securable"] = self.securable.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SecurableNotificationDestinations into a shallow dictionary of its immediate attributes.""" + body = {} + if self.notification_destinations: + body["notification_destinations"] = self.notification_destinations + if self.securable: + body["securable"] = self.securable + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SecurableNotificationDestinations: + """Deserializes the SecurableNotificationDestinations from a dictionary.""" + return cls( + notification_destinations=_from_dict(d, "notification_destinations", NotificationDestination), + securable=_from_dict(d, "securable", Securable), + ) + + +@dataclass +class SecurablePermissions: + permission: Optional[List[str]] = None + """List of requested Unity Catalog permissions.""" + + securable: Optional[Securable] = None + """The securable for which the access request destinations are being requested.""" + + def as_dict(self) -> dict: + """Serializes the SecurablePermissions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission: + body["permission"] = [v for v in self.permission] + if self.securable: + body["securable"] = self.securable.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SecurablePermissions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission: + body["permission"] = self.permission + if self.securable: + body["securable"] = self.securable + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SecurablePermissions: + """Deserializes the SecurablePermissions from a dictionary.""" + return 
cls(permission=d.get("permission", None), securable=_from_dict(d, "securable", Securable))
+
+
 class SecurableType(Enum):
     """The type of Unity Catalog securable."""

@@ -7875,6 +8263,15 @@ class SecurableType(Enum):
     VOLUME = "VOLUME"


+class SpecialDestination(Enum):
+
+    SPECIAL_DESTINATION_CATALOG_OWNER = "SPECIAL_DESTINATION_CATALOG_OWNER"
+    SPECIAL_DESTINATION_CONNECTION_OWNER = "SPECIAL_DESTINATION_CONNECTION_OWNER"
+    SPECIAL_DESTINATION_CREDENTIAL_OWNER = "SPECIAL_DESTINATION_CREDENTIAL_OWNER"
+    SPECIAL_DESTINATION_EXTERNAL_LOCATION_OWNER = "SPECIAL_DESTINATION_EXTERNAL_LOCATION_OWNER"
+    SPECIAL_DESTINATION_METASTORE_OWNER = "SPECIAL_DESTINATION_METASTORE_OWNER"
+
+
 @dataclass
 class SseEncryptionDetails:
     """Server-Side Encryption properties for clients communicating with AWS s3."""
@@ -11457,6 +11854,7 @@ def get(
         securable_type: str,
         full_name: str,
         *,
+        include_deleted_principals: Optional[bool] = None,
         max_results: Optional[int] = None,
         page_token: Optional[str] = None,
         principal: Optional[str] = None,
@@ -11467,6 +11865,8 @@
           Type of securable.
         :param full_name: str
           Full name of securable.
+        :param include_deleted_principals: bool (optional)
+          Optional. If true, also return privilege assignments whose principals have been deleted.
         :param max_results: int (optional)
          Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment
          present in a single page response is guaranteed to contain all the privileges granted on the
@@ -11486,6 +11886,8 @@
        """

        query = {}
+       if include_deleted_principals is not None:
+           query["include_deleted_principals"] = include_deleted_principals
        if max_results is not None:
            query["max_results"] = max_results
        if page_token is not None:
@@ -12832,6 +13234,96 @@ def update(
         return RegisteredModelInfo.from_dict(res)


+class RequestForAccessAPI:
+    """Request for Access enables customers to request access to and manage access request destinations for Unity
+    Catalog securables.
+
+    These APIs provide a standardized way to get and update access request destinations and to create
+    access requests. Fine-grained authorization ensures that only users with appropriate permissions can
+    manage access request destinations."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def batch_create_access_requests(
+        self, *, requests: Optional[List[CreateAccessRequest]] = None
+    ) -> BatchCreateAccessRequestsResponse:
+        """Creates access requests for Unity Catalog permissions for a specified principal on a securable
+        object. This Batch API can take in multiple principals, securable objects, and permissions as the
+        input and returns the access request destinations for each.
+
+        :param requests: List[:class:`CreateAccessRequest`] (optional)
+          A list of individual access requests, where each request corresponds to a set of permissions being
+          requested on a list of securables for a specified principal.
+
+        :returns: :class:`BatchCreateAccessRequestsResponse`
+        """
+        body = {}
+        if requests is not None:
+            body["requests"] = [v.as_dict() for v in requests]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/3.0/rfa/requests", body=body, headers=headers)
+        return BatchCreateAccessRequestsResponse.from_dict(res)
+
+    def get_access_request_destinations(self, securable_type: str, full_name: str) -> AccessRequestDestinations:
+        """Gets an array of access request destinations for the specified securable.
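# Illustrative sketch of calling the new Request for Access surface defined
# above. Assumptions: a configured WorkspaceClient; the `w.request_for_access`
# accessor is the one registered in __init__.py earlier in this patch; the
# table name is hypothetical.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import (CreateAccessRequest, Securable,
                                            SecurablePermissions, SecurableType)

w = WorkspaceClient()

request = CreateAccessRequest(
    comment="Read access for quarterly reporting",
    securable_permissions=[
        SecurablePermissions(
            permission=["SELECT"],
            securable=Securable(full_name="main.sales.orders", type=SecurableType.TABLE),
        )
    ],
)
response = w.request_for_access.batch_create_access_requests(requests=[request])
for item in response.responses or []:
    # Each response carries the destinations the request was routed to, per securable.
    for dest in item.destinations or []:
        print(dest.securable, dest.notification_destinations)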
+        Any caller can see URL destinations or the destinations on the metastore. Otherwise, only those
+        with **BROWSE** permissions on the securable can see destinations.
+
+        :param securable_type: str
+        :param full_name: str
+
+        :returns: :class:`AccessRequestDestinations`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/3.0/rfa/destinations/{securable_type}/{full_name}", headers=headers)
+        return AccessRequestDestinations.from_dict(res)
+
+    def update_access_request_destinations(
+        self, access_request_destinations: AccessRequestDestinations, update_mask: str
+    ) -> AccessRequestDestinations:
+        """Updates the access request destinations for the given securable. The caller must be a metastore
+        admin, the owner of the securable, or a user that has the **MANAGE** privilege on the securable in
+        order to assign destinations.
+
+        :param access_request_destinations: :class:`AccessRequestDestinations`
+          For each destination, if **special_destination** is defined, then a **destination_id** is not
+          required. Furthermore, the **destination_type** of a **special_destination** is always **EMAIL**.
+          Otherwise, a **destination_id** and **destination_type** must be defined.
+        :param update_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+
+        :returns: :class:`AccessRequestDestinations`
+        """
+        body = access_request_destinations.as_dict()
+        query = {}
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", "/api/3.0/rfa/destinations", query=query, body=body, headers=headers)
+        return AccessRequestDestinations.from_dict(res)
+
+
 class ResourceQuotasAPI:
     """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that
     can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 071ac89e0..d2080d48a 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -2718,6 +2718,168 @@ def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo:
         return cls(destination=d.get("destination", None))


+@dataclass
+class DefaultBaseEnvironment:
+    base_environment_cache: Optional[List[DefaultBaseEnvironmentCache]] = None
+
+    created_timestamp: Optional[int] = None
+
+    creator_user_id: Optional[int] = None
+
+    environment: Optional[Environment] = None
+    """Note: we made `environment` non-internal because we need to expose its `client` field.
All other + fields should be treated as internal.""" + + filepath: Optional[str] = None + + id: Optional[str] = None + + is_default: Optional[bool] = None + + last_updated_timestamp: Optional[int] = None + + last_updated_user_id: Optional[int] = None + + message: Optional[str] = None + + name: Optional[str] = None + + principal_ids: Optional[List[int]] = None + + status: Optional[DefaultBaseEnvironmentCacheStatus] = None + + def as_dict(self) -> dict: + """Serializes the DefaultBaseEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.base_environment_cache: + body["base_environment_cache"] = [v.as_dict() for v in self.base_environment_cache] + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.environment: + body["environment"] = self.environment.as_dict() + if self.filepath is not None: + body["filepath"] = self.filepath + if self.id is not None: + body["id"] = self.id + if self.is_default is not None: + body["is_default"] = self.is_default + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user_id is not None: + body["last_updated_user_id"] = self.last_updated_user_id + if self.message is not None: + body["message"] = self.message + if self.name is not None: + body["name"] = self.name + if self.principal_ids: + body["principal_ids"] = [v for v in self.principal_ids] + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultBaseEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.base_environment_cache: + body["base_environment_cache"] = self.base_environment_cache + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.environment: + body["environment"] = self.environment + if self.filepath is not None: + body["filepath"] = self.filepath + if self.id is not None: + body["id"] = self.id + if self.is_default is not None: + body["is_default"] = self.is_default + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user_id is not None: + body["last_updated_user_id"] = self.last_updated_user_id + if self.message is not None: + body["message"] = self.message + if self.name is not None: + body["name"] = self.name + if self.principal_ids: + body["principal_ids"] = self.principal_ids + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironment: + """Deserializes the DefaultBaseEnvironment from a dictionary.""" + return cls( + base_environment_cache=_repeated_dict(d, "base_environment_cache", DefaultBaseEnvironmentCache), + created_timestamp=d.get("created_timestamp", None), + creator_user_id=d.get("creator_user_id", None), + environment=_from_dict(d, "environment", Environment), + filepath=d.get("filepath", None), + id=d.get("id", None), + is_default=d.get("is_default", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + last_updated_user_id=d.get("last_updated_user_id", None), + message=d.get("message", None), + name=d.get("name", None), + principal_ids=d.get("principal_ids", None), + 
status=_enum(d, "status", DefaultBaseEnvironmentCacheStatus), + ) + + +@dataclass +class DefaultBaseEnvironmentCache: + materialized_environment: Optional[MaterializedEnvironment] = None + + message: Optional[str] = None + + status: Optional[DefaultBaseEnvironmentCacheStatus] = None + + def as_dict(self) -> dict: + """Serializes the DefaultBaseEnvironmentCache into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.materialized_environment: + body["materialized_environment"] = self.materialized_environment.as_dict() + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultBaseEnvironmentCache into a shallow dictionary of its immediate attributes.""" + body = {} + if self.materialized_environment: + body["materialized_environment"] = self.materialized_environment + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironmentCache: + """Deserializes the DefaultBaseEnvironmentCache from a dictionary.""" + return cls( + materialized_environment=_from_dict(d, "materialized_environment", MaterializedEnvironment), + message=d.get("message", None), + status=_enum(d, "status", DefaultBaseEnvironmentCacheStatus), + ) + + +class DefaultBaseEnvironmentCacheStatus(Enum): + + CREATED = "CREATED" + EXPIRED = "EXPIRED" + FAILED = "FAILED" + INVALID = "INVALID" + PENDING = "PENDING" + REFRESHING = "REFRESHING" + + @dataclass class DeleteClusterResponse: def as_dict(self) -> dict: @@ -3809,6 +3971,10 @@ class GetInstancePool: disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" + enable_auto_alternate_node_types: Optional[bool] = None + """For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + are enabled. This field should not be true if node_type_flexibility is set.""" + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -3838,6 +4004,11 @@ class GetInstancePool: min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" + node_type_flexibility: Optional[NodeTypeFlexibility] = None + """For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or @@ -3882,6 +4053,8 @@ def as_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -3896,6 +4069,8 @@ def as_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility.as_dict() if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -3927,6 +4102,8 @@ def as_shallow_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -3941,6 +4118,8 @@ def as_shallow_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -3968,6 +4147,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: custom_tags=d.get("custom_tags", None), default_tags=d.get("default_tags", None), disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_auto_alternate_node_types=d.get("enable_auto_alternate_node_types", None), enable_elastic_disk=d.get("enable_elastic_disk", None), gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), @@ -3975,6 +4155,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: instance_pool_name=d.get("instance_pool_name", None), max_capacity=d.get("max_capacity", None), min_idle_instances=d.get("min_idle_instances", None), + node_type_flexibility=_from_dict(d, "node_type_flexibility", NodeTypeFlexibility), node_type_id=d.get("node_type_id", None), preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), preloaded_spark_versions=d.get("preloaded_spark_versions", None), @@ -4620,6 +4801,10 @@ class InstancePoolAndStats: disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" + enable_auto_alternate_node_types: Optional[bool] = None + """For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + are enabled. This field should not be true if node_type_flexibility is set.""" + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. 
In AWS, this feature @@ -4652,6 +4837,11 @@ class InstancePoolAndStats: min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" + node_type_flexibility: Optional[NodeTypeFlexibility] = None + """For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -4696,6 +4886,8 @@ def as_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4710,6 +4902,8 @@ def as_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility.as_dict() if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -4741,6 +4935,8 @@ def as_shallow_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4755,6 +4951,8 @@ def as_shallow_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -4782,6 +4980,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: custom_tags=d.get("custom_tags", None), default_tags=d.get("default_tags", None), disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_auto_alternate_node_types=d.get("enable_auto_alternate_node_types", None), enable_elastic_disk=d.get("enable_elastic_disk", None), gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), @@ -4789,6 +4988,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: instance_pool_name=d.get("instance_pool_name", None), max_capacity=d.get("max_capacity", None), min_idle_instances=d.get("min_idle_instances", None), + node_type_flexibility=_from_dict(d, "node_type_flexibility", NodeTypeFlexibility), node_type_id=d.get("node_type_id", None), preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), preloaded_spark_versions=d.get("preloaded_spark_versions", None), @@ -5654,6 +5854,39 @@ class ListClustersSortByField(Enum): DEFAULT = "DEFAULT" +@dataclass +class ListDefaultBaseEnvironmentsResponse: + 
default_base_environments: Optional[List[DefaultBaseEnvironment]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListDefaultBaseEnvironmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_base_environments: + body["default_base_environments"] = [v.as_dict() for v in self.default_base_environments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDefaultBaseEnvironmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_base_environments: + body["default_base_environments"] = self.default_base_environments + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDefaultBaseEnvironmentsResponse: + """Deserializes the ListDefaultBaseEnvironmentsResponse from a dictionary.""" + return cls( + default_base_environments=_repeated_dict(d, "default_base_environments", DefaultBaseEnvironment), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListGlobalInitScriptsResponse: scripts: Optional[List[GlobalInitScriptDetails]] = None @@ -5922,6 +6155,44 @@ def from_dict(cls, d: Dict[str, Any]) -> LogSyncStatus: MapAny = Dict[str, Any] +@dataclass +class MaterializedEnvironment: + """Materialized Environment information enables environment sharing and reuse via Environment + Caching during library installations. Currently this feature is only supported for Python + libraries. + + - If the env cache entry in LMv2 DB doesn't exist or invalid, library installations and + environment materialization will occur. A new Materialized Environment metadata will be sent + from DP upon successful library installations and env materialization, and is persisted into + database by LMv2. - If the env cache entry in LMv2 DB is valid, the Materialized Environment + will be sent to DP by LMv2, and DP will restore the cached environment from a store instead of + reinstalling libraries from scratch. + + If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto with new version""" + + last_updated_timestamp: Optional[int] = None + """The timestamp (in epoch milliseconds) when the materialized env is updated.""" + + def as_dict(self) -> dict: + """Serializes the MaterializedEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MaterializedEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MaterializedEnvironment: + """Deserializes the MaterializedEnvironment from a dictionary.""" + return cls(last_updated_timestamp=d.get("last_updated_timestamp", None)) + + @dataclass class MavenLibrary: coordinates: str @@ -6216,6 +6487,28 @@ def from_dict(cls, d: Dict[str, Any]) -> NodeType: ) +@dataclass +class NodeTypeFlexibility: + """For Fleet-V2 using classic clusters, this object contains the information about the alternate + node type ids to use when attempting to launch a cluster. 
It can be used with both the driver + and worker node types.""" + + def as_dict(self) -> dict: + """Serializes the NodeTypeFlexibility into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NodeTypeFlexibility into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NodeTypeFlexibility: + """Deserializes the NodeTypeFlexibility from a dictionary.""" + return cls() + + @dataclass class PendingInstanceError: """Error message of a failed pending instances""" @@ -6528,6 +6821,24 @@ def from_dict(cls, d: Dict[str, Any]) -> RCranLibrary: return cls(package=d.get("package", None), repo=d.get("repo", None)) +@dataclass +class RefreshDefaultBaseEnvironmentsResponse: + def as_dict(self) -> dict: + """Serializes the RefreshDefaultBaseEnvironmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RefreshDefaultBaseEnvironmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RefreshDefaultBaseEnvironmentsResponse: + """Deserializes the RefreshDefaultBaseEnvironmentsResponse from a dictionary.""" + return cls() + + @dataclass class RemoveResponse: def as_dict(self) -> dict: @@ -9815,11 +10126,13 @@ def create( azure_attributes: Optional[InstancePoolAzureAttributes] = None, custom_tags: Optional[Dict[str, str]] = None, disk_spec: Optional[DiskSpec] = None, + enable_auto_alternate_node_types: Optional[bool] = None, enable_elastic_disk: Optional[bool] = None, gcp_attributes: Optional[InstancePoolGcpAttributes] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, + node_type_flexibility: Optional[NodeTypeFlexibility] = None, preloaded_docker_images: Optional[List[DockerImage]] = None, preloaded_spark_versions: Optional[List[str]] = None, remote_disk_throughput: Optional[int] = None, @@ -9848,6 +10161,9 @@ def create( - Currently, Databricks allows at most 45 custom tags :param disk_spec: :class:`DiskSpec` (optional) Defines the specification of the disks that will be attached to all spark containers. + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. :param enable_elastic_disk: bool (optional) Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -9867,6 +10183,10 @@ def create( upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool + :param node_type_flexibility: :class:`NodeTypeFlexibility` (optional) + For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true. 
:param preloaded_docker_images: List[:class:`DockerImage`] (optional) Custom Docker Image BYOC :param preloaded_spark_versions: List[str] (optional) @@ -9891,6 +10211,8 @@ def create( body["custom_tags"] = custom_tags if disk_spec is not None: body["disk_spec"] = disk_spec.as_dict() + if enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = enable_auto_alternate_node_types if enable_elastic_disk is not None: body["enable_elastic_disk"] = enable_elastic_disk if gcp_attributes is not None: @@ -9903,6 +10225,8 @@ def create( body["max_capacity"] = max_capacity if min_idle_instances is not None: body["min_idle_instances"] = min_idle_instances + if node_type_flexibility is not None: + body["node_type_flexibility"] = node_type_flexibility.as_dict() if node_type_id is not None: body["node_type_id"] = node_type_id if preloaded_docker_images is not None: @@ -9946,9 +10270,11 @@ def edit( node_type_id: str, *, custom_tags: Optional[Dict[str, str]] = None, + enable_auto_alternate_node_types: Optional[bool] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, + node_type_flexibility: Optional[NodeTypeFlexibility] = None, remote_disk_throughput: Optional[int] = None, total_initial_remote_disk_size: Optional[int] = None, ): @@ -9969,6 +10295,9 @@ def edit( EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances @@ -9981,6 +10310,10 @@ def edit( upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool + :param node_type_flexibility: :class:`NodeTypeFlexibility` (optional) + For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true. :param remote_disk_throughput: int (optional) If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only supported for GCP HYPERDISK_BALANCED types. 
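# Illustrative sketch of the two new, mutually exclusive pool options documented
# above. Assumptions: a configured WorkspaceClient; the pool name and node type
# id are hypothetical; NodeTypeFlexibility carries no fields in this diff.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import NodeTypeFlexibility

w = WorkspaceClient()

# Option 1: let the service auto-generate alternate node types (Fleet-V2).
pool = w.instance_pools.create(
    instance_pool_name="fleet-v2-pool",
    node_type_id="m5.xlarge",
    enable_auto_alternate_node_types=True,
)

# Option 2: opt into explicit alternate-node-type handling instead. Per the
# docstrings above, node_type_flexibility must not be combined with
# enable_auto_alternate_node_types=True.
w.instance_pools.edit(
    instance_pool_id=pool.instance_pool_id,
    instance_pool_name="fleet-v2-pool",
    node_type_id="m5.xlarge",
    node_type_flexibility=NodeTypeFlexibility(),
)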
@@ -9993,6 +10326,8 @@ def edit( body = {} if custom_tags is not None: body["custom_tags"] = custom_tags + if enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = enable_auto_alternate_node_types if idle_instance_autotermination_minutes is not None: body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes if instance_pool_id is not None: @@ -10003,6 +10338,8 @@ def edit( body["max_capacity"] = max_capacity if min_idle_instances is not None: body["min_idle_instances"] = min_idle_instances + if node_type_flexibility is not None: + body["node_type_flexibility"] = node_type_flexibility.as_dict() if node_type_id is not None: body["node_type_id"] = node_type_id if remote_disk_throughput is not None: @@ -10347,6 +10684,48 @@ def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]: parsed = ClusterLibraryStatuses.from_dict(json).library_statuses return parsed if parsed is not None else [] + def create_default_base_environment( + self, default_base_environment: DefaultBaseEnvironment, *, request_id: Optional[str] = None + ) -> DefaultBaseEnvironment: + """Create a default base environment within workspaces to define the environment version and a list of + dependencies to be used in serverless notebooks and jobs. This process will asynchronously generate a + cache to optimize dependency resolution. + + :param default_base_environment: :class:`DefaultBaseEnvironment` + :param request_id: str (optional) + A unique identifier for this request. A random UUID is recommended. This request is only idempotent + if a `request_id` is provided. + + :returns: :class:`DefaultBaseEnvironment` + """ + body = {} + if default_base_environment is not None: + body["default_base_environment"] = default_base_environment.as_dict() + if request_id is not None: + body["request_id"] = request_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/default-base-environments", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + + def delete_default_base_environment(self, id: str): + """Delete the default base environment given an ID. The default base environment may be used by + downstream workloads. Please ensure that the deletion is intentional. + + :param id: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/default-base-environments/{id}", headers=headers) + def install(self, cluster_id: str, libraries: List[Library]): """Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. @@ -10370,6 +10749,53 @@ def install(self, cluster_id: str, libraries: List[Library]): self._api.do("POST", "/api/2.0/libraries/install", body=body, headers=headers) + def list_default_base_environments( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DefaultBaseEnvironment]: + """List default base environments defined in the workspaces for the requested user. 
+
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+
+        :returns: Iterator over :class:`DefaultBaseEnvironment`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do("GET", "/api/2.0/default-base-environments", query=query, headers=headers)
+            if "default_base_environments" in json:
+                for v in json["default_base_environments"]:
+                    yield DefaultBaseEnvironment.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def refresh_default_base_environments(self, ids: List[str]):
+        """Refresh the cached default base environments for the given IDs. This process will asynchronously
+        regenerate the caches. The existing caches remain available until they expire.
+
+        :param ids: List[str]
+
+
+        """
+        body = {}
+        if ids is not None:
+            body["ids"] = [v for v in ids]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/default-base-environments/refresh", body=body, headers=headers)
+
     def uninstall(self, cluster_id: str, libraries: List[Library]):
         """Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is
         restarted. A request to uninstall a library that is not currently installed is ignored.
@@ -10393,6 +10819,28 @@ def uninstall(self, cluster_id: str, libraries: List[Library]):
 
         self._api.do("POST", "/api/2.0/libraries/uninstall", body=body, headers=headers)
 
+    def update_default_base_environment(
+        self, id: str, *, default_base_environment: Optional[DefaultBaseEnvironment] = None
+    ) -> DefaultBaseEnvironment:
+        """Update the default base environment for the given ID. This process will asynchronously regenerate the
+        cache. The existing cache remains available until it expires.
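+
+        A minimal sketch (assuming ``w.libraries`` as above; ``dbe`` is a previously fetched
+        :class:`DefaultBaseEnvironment` with modified fields, and the ``id`` value is illustrative):
+
+        .. code-block:: python
+
+            updated = w.libraries.update_default_base_environment(
+                id="<default-base-environment-id>",
+                default_base_environment=dbe,
+            )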
+ + :param id: str + :param default_base_environment: :class:`DefaultBaseEnvironment` (optional) + + :returns: :class:`DefaultBaseEnvironment` + """ + body = {} + if default_base_environment is not None: + body["default_base_environment"] = default_base_environment.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/default-base-environments/{id}", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + class PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 01ac655a4..85ed1b8cc 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -102,6 +102,72 @@ def from_dict(cls, d: Dict[str, Any]) -> AuthorizationDetailsGrantRule: return cls(permission_set=d.get("permission_set", None)) +@dataclass +class CancelQueryExecutionResponse: + status: Optional[List[CancelQueryExecutionResponseStatus]] = None + + def as_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: + body["status"] = [v.as_dict() for v in self.status] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponse: + """Deserializes the CancelQueryExecutionResponse from a dictionary.""" + return cls(status=_repeated_dict(d, "status", CancelQueryExecutionResponseStatus)) + + +@dataclass +class CancelQueryExecutionResponseStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + pending: Optional[Empty] = None + + success: Optional[Empty] = None + + def as_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.pending: + body["pending"] = self.pending.as_dict() + if self.success: + body["success"] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.pending: + body["pending"] = self.pending + if self.success: + body["success"] = self.success + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponseStatus: + """Deserializes the CancelQueryExecutionResponseStatus from a dictionary.""" + return cls( + data_token=d.get("data_token", None), + pending=_from_dict(d, "pending", Empty), + success=_from_dict(d, "success", Empty), + ) + + @dataclass class CronSchedule: quartz_cron_expression: str @@ -253,6 +319,45 @@ class DashboardView(Enum): DASHBOARD_VIEW_BASIC = "DASHBOARD_VIEW_BASIC" +@dataclass +class Empty: + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" + + def as_dict(self) -> dict: + """Serializes the Empty into a 
dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Empty into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + +@dataclass +class ExecuteQueryResponse: + def as_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExecuteQueryResponse: + """Deserializes the ExecuteQueryResponse from a dictionary.""" + return cls() + + @dataclass class GenieAttachment: """Genie AI Response""" @@ -413,6 +518,57 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieConversationSummary: ) +@dataclass +class GenieGenerateDownloadFullQueryResultResponse: + download_id: Optional[str] = None + """Download ID. Use this ID to track the download request in subsequent polling calls""" + + def as_dict(self) -> dict: + """Serializes the GenieGenerateDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.download_id is not None: + body["download_id"] = self.download_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieGenerateDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.download_id is not None: + body["download_id"] = self.download_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieGenerateDownloadFullQueryResultResponse: + """Deserializes the GenieGenerateDownloadFullQueryResultResponse from a dictionary.""" + return cls(download_id=d.get("download_id", None)) + + +@dataclass +class GenieGetDownloadFullQueryResultResponse: + statement_response: Optional[sql.StatementResponse] = None + """SQL Statement Execution response. 
See [Get status, manifest, and result first + chunk](:method:statementexecution/getstatement) for more details.""" + + def as_dict(self) -> dict: + """Serializes the GenieGetDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.statement_response: + body["statement_response"] = self.statement_response.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieGetDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.statement_response: + body["statement_response"] = self.statement_response + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieGetDownloadFullQueryResultResponse: + """Deserializes the GenieGetDownloadFullQueryResultResponse from a dictionary.""" + return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) + + @dataclass class GenieGetMessageQueryResultResponse: statement_response: Optional[sql.StatementResponse] = None @@ -816,6 +972,24 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationResponse: ) +@dataclass +class GetPublishedDashboardEmbeddedResponse: + def as_dict(self) -> dict: + """Serializes the GetPublishedDashboardEmbeddedResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetPublishedDashboardEmbeddedResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetPublishedDashboardEmbeddedResponse: + """Deserializes the GetPublishedDashboardEmbeddedResponse from a dictionary.""" + return cls() + + @dataclass class GetPublishedDashboardTokenInfoResponse: authorization_details: Optional[List[AuthorizationDetails]] = None @@ -1084,6 +1258,80 @@ class MessageStatus(Enum): SUBMITTED = "SUBMITTED" +@dataclass +class PendingStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + def as_dict(self) -> dict: + """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PendingStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PendingStatus: + """Deserializes the PendingStatus from a dictionary.""" + return cls(data_token=d.get("data_token", None)) + + +@dataclass +class PollQueryStatusResponse: + data: Optional[List[PollQueryStatusResponseData]] = None + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data: + body["data"] = [v.as_dict() for v in self.data] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data: + body["data"] = self.data + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponse: + """Deserializes the PollQueryStatusResponse from a dictionary.""" + return cls(data=_repeated_dict(d, "data", 
PollQueryStatusResponseData)) + + +@dataclass +class PollQueryStatusResponseData: + status: QueryResponseStatus + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: + body["status"] = self.status.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponseData: + """Deserializes the PollQueryStatusResponseData from a dictionary.""" + return cls(status=_from_dict(d, "status", QueryResponseStatus)) + + @dataclass class PublishedDashboard: display_name: Optional[str] = None @@ -1135,6 +1383,63 @@ def from_dict(cls, d: Dict[str, Any]) -> PublishedDashboard: ) +@dataclass +class QueryResponseStatus: + canceled: Optional[Empty] = None + + closed: Optional[Empty] = None + + pending: Optional[PendingStatus] = None + + statement_id: Optional[str] = None + """The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be + identical to data_token in SuccessStatus and PendingStatus. This field is created for audit + logging purpose to record the statement_id of all QueryResponseStatus.""" + + success: Optional[SuccessStatus] = None + + def as_dict(self) -> dict: + """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.canceled: + body["canceled"] = self.canceled.as_dict() + if self.closed: + body["closed"] = self.closed.as_dict() + if self.pending: + body["pending"] = self.pending.as_dict() + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.success: + body["success"] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.canceled: + body["canceled"] = self.canceled + if self.closed: + body["closed"] = self.closed + if self.pending: + body["pending"] = self.pending + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.success: + body["success"] = self.success + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> QueryResponseStatus: + """Deserializes the QueryResponseStatus from a dictionary.""" + return cls( + canceled=_from_dict(d, "canceled", Empty), + closed=_from_dict(d, "closed", Empty), + pending=_from_dict(d, "pending", PendingStatus), + statement_id=d.get("statement_id", None), + success=_from_dict(d, "success", SuccessStatus), + ) + + @dataclass class Result: is_truncated: Optional[bool] = None @@ -1450,6 +1755,39 @@ def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriberUser: return cls(user_id=d.get("user_id", None)) +@dataclass +class SuccessStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + truncated: Optional[bool] = None + """Whether the query result is truncated (either by byte limit or row limit)""" + + def as_dict(self) -> dict: + """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.truncated is not None: + 
body["truncated"] = self.truncated + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.truncated is not None: + body["truncated"] = self.truncated + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SuccessStatus: + """Deserializes the SuccessStatus from a dictionary.""" + return cls(data_token=d.get("data_token", None), truncated=d.get("truncated", None)) + + @dataclass class TextAttachment: content: Optional[str] = None @@ -1676,6 +2014,75 @@ def execute_message_query( ) return GenieGetMessageQueryResultResponse.from_dict(res) + def generate_download_full_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str + ) -> GenieGenerateDownloadFullQueryResultResponse: + """Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of + the download. The query result is stored in an external link and can be retrieved using the [Get + Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks + strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. + See [Execute Statement](:method:statementexecution/executestatement) for more details. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + :param attachment_id: str + Attachment ID + + :returns: :class:`GenieGenerateDownloadFullQueryResultResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads", + headers=headers, + ) + return GenieGenerateDownloadFullQueryResultResponse.from_dict(res) + + def get_download_full_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str + ) -> GenieGetDownloadFullQueryResultResponse: + """After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and + successfully receiving a `download_id`, use this API to poll the download progress. When the download + is complete, the API returns one or more external links to the query result files. Warning: Databricks + strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. + You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS` + disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute + Statement](:method:statementexecution/executestatement) for more details. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + :param attachment_id: str + Attachment ID + :param download_id: str + Download ID. 
This ID is provided by the [Generate Download
+          endpoint](:method:genie/generateDownloadFullQueryResult)
+
+        :returns: :class:`GenieGetDownloadFullQueryResultResponse`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads/{download_id}",
+            headers=headers,
+        )
+        return GenieGetDownloadFullQueryResultResponse.from_dict(res)
+
     def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
         """Get message from conversation.
 
@@ -2375,6 +2782,21 @@ class LakeviewEmbeddedAPI:
     def __init__(self, api_client):
         self._api = api_client
 
+    def get_published_dashboard_embedded(self, dashboard_id: str):
+        """Get the current published dashboard within an embedded context.
+
+        :param dashboard_id: str
+          UUID identifying the published dashboard.
+
+
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded", headers=headers)
+
     def get_published_dashboard_token_info(
         self, dashboard_id: str, *, external_value: Optional[str] = None, external_viewer_id: Optional[str] = None
     ) -> GetPublishedDashboardTokenInfoResponse:
@@ -2403,3 +2825,93 @@ def get_published_dashboard_token_info(
             "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/tokeninfo", query=query, headers=headers
         )
         return GetPublishedDashboardTokenInfoResponse.from_dict(res)
+
+
+class QueryExecutionAPI:
+    """Query execution APIs for AI / BI Dashboards"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def cancel_published_query_execution(
+        self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None
+    ) -> CancelQueryExecutionResponse:
+        """Cancel the results of a query for a published, embedded dashboard.
+
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+
+        :returns: :class:`CancelQueryExecutionResponse`
+        """
+
+        query = {}
+        if dashboard_name is not None:
+            query["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            query["dashboard_revision_id"] = dashboard_revision_id
+        if tokens is not None:
+            query["tokens"] = [v for v in tokens]
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("DELETE", "/api/2.0/lakeview-query/query/published", query=query, headers=headers)
+        return CancelQueryExecutionResponse.from_dict(res)
+
+    def execute_published_dashboard_query(
+        self, dashboard_name: str, dashboard_revision_id: str, *, override_warehouse_id: Optional[str] = None
+    ):
+        """Execute a query for a published dashboard.
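+
+        A typical embedded flow is execute, then poll, then cancel if the viewer navigates away.
+        A minimal sketch (assuming this service is exposed as ``w.query_execution`` and that the
+        name and revision values are illustrative):
+
+        .. code-block:: python
+
+            w.query_execution.execute_published_dashboard_query(
+                dashboard_name="sales-overview", dashboard_revision_id="1"
+            )
+            status = w.query_execution.poll_published_query_status(
+                dashboard_name="sales-overview", dashboard_revision_id="1"
+            )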
+
+        :param dashboard_name: str
+          Dashboard name and revision_id are required to retrieve PublishedDatasetDataModel, which contains the
+          list of datasets, warehouse_id, and embedded_credentials
+        :param dashboard_revision_id: str
+        :param override_warehouse_id: str (optional)
+          A dashboard schedule can override the warehouse used as compute for processing the published
+          dashboard queries
+
+
+        """
+        body = {}
+        if dashboard_name is not None:
+            body["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            body["dashboard_revision_id"] = dashboard_revision_id
+        if override_warehouse_id is not None:
+            body["override_warehouse_id"] = override_warehouse_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/lakeview-query/query/published", body=body, headers=headers)
+
+    def poll_published_query_status(
+        self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None
+    ) -> PollQueryStatusResponse:
+        """Poll the results of a query for a published, embedded dashboard.
+
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+
+        :returns: :class:`PollQueryStatusResponse`
+        """
+
+        query = {}
+        if dashboard_name is not None:
+            query["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            query["dashboard_revision_id"] = dashboard_revision_id
+        if tokens is not None:
+            query["tokens"] = [v for v in tokens]
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/lakeview-query/query/published", query=query, headers=headers)
+        return PollQueryStatusResponse.from_dict(res)
diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py
index 183d03140..1aa34d0d0 100755
--- a/databricks/sdk/service/database.py
+++ b/databricks/sdk/service/database.py
@@ -110,6 +110,11 @@ class DatabaseInstance:
     name: str
     """The name of the instance. This is the unique identifier for the instance."""
 
+    budget_policy_id: Optional[str] = None
+    """The desired budget policy to associate with the instance. This field is only returned on
+    create/update responses, and represents the customer provided budget policy. See
+    effective_budget_policy_id for the policy that is actually applied to the instance."""
+
     capacity: Optional[str] = None
     """The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8"."""
 
@@ -122,6 +127,16 @@ class DatabaseInstance:
     creator: Optional[str] = None
     """The email of the creator of the instance."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """The policy that is applied to the instance."""
+
+    effective_enable_pg_native_login: Optional[bool] = None
+    """xref AIP-129. `enable_pg_native_login` is owned by the client, while
+    `effective_enable_pg_native_login` is owned by the server. `enable_pg_native_login` will only be
+    set in Create/Update response messages if and only if the user provides the field via the
+    request. `effective_enable_pg_native_login` on the other hand will always be set in all
+    response messages (Create/Update/Get/List)."""
+
     effective_enable_readable_secondaries: Optional[bool] = None
     """xref AIP-129. `enable_readable_secondaries` is owned by the client, while
    `effective_enable_readable_secondaries` is owned by the server.
`enable_readable_secondaries` @@ -148,6 +163,9 @@ class DatabaseInstance: provides the field via the request. `effective_stopped` on the other hand will always bet set in all response messages (Create/Update/Get/List).""" + enable_pg_native_login: Optional[bool] = None + """Whether the instance has PG native password login enabled. Defaults to true.""" + enable_readable_secondaries: Optional[bool] = None """Whether to enable secondaries to serve read-only traffic. Defaults to false.""" @@ -186,6 +204,8 @@ class DatabaseInstance: def as_dict(self) -> dict: """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.capacity is not None: body["capacity"] = self.capacity if self.child_instance_refs: @@ -194,6 +214,10 @@ def as_dict(self) -> dict: body["creation_time"] = self.creation_time if self.creator is not None: body["creator"] = self.creator + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_enable_pg_native_login is not None: + body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login if self.effective_enable_readable_secondaries is not None: body["effective_enable_readable_secondaries"] = self.effective_enable_readable_secondaries if self.effective_node_count is not None: @@ -202,6 +226,8 @@ def as_dict(self) -> dict: body["effective_retention_window_in_days"] = self.effective_retention_window_in_days if self.effective_stopped is not None: body["effective_stopped"] = self.effective_stopped + if self.enable_pg_native_login is not None: + body["enable_pg_native_login"] = self.enable_pg_native_login if self.enable_readable_secondaries is not None: body["enable_readable_secondaries"] = self.enable_readable_secondaries if self.name is not None: @@ -229,6 +255,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.capacity is not None: body["capacity"] = self.capacity if self.child_instance_refs: @@ -237,6 +265,10 @@ def as_shallow_dict(self) -> dict: body["creation_time"] = self.creation_time if self.creator is not None: body["creator"] = self.creator + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_enable_pg_native_login is not None: + body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login if self.effective_enable_readable_secondaries is not None: body["effective_enable_readable_secondaries"] = self.effective_enable_readable_secondaries if self.effective_node_count is not None: @@ -245,6 +277,8 @@ def as_shallow_dict(self) -> dict: body["effective_retention_window_in_days"] = self.effective_retention_window_in_days if self.effective_stopped is not None: body["effective_stopped"] = self.effective_stopped + if self.enable_pg_native_login is not None: + body["enable_pg_native_login"] = self.enable_pg_native_login if self.enable_readable_secondaries is not None: body["enable_readable_secondaries"] = self.enable_readable_secondaries if self.name is not None: @@ -273,14 +307,18 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance: """Deserializes the DatabaseInstance from 
a dictionary.""" return cls( + budget_policy_id=d.get("budget_policy_id", None), capacity=d.get("capacity", None), child_instance_refs=_repeated_dict(d, "child_instance_refs", DatabaseInstanceRef), creation_time=d.get("creation_time", None), creator=d.get("creator", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_enable_pg_native_login=d.get("effective_enable_pg_native_login", None), effective_enable_readable_secondaries=d.get("effective_enable_readable_secondaries", None), effective_node_count=d.get("effective_node_count", None), effective_retention_window_in_days=d.get("effective_retention_window_in_days", None), effective_stopped=d.get("effective_stopped", None), + enable_pg_native_login=d.get("enable_pg_native_login", None), enable_readable_secondaries=d.get("enable_readable_secondaries", None), name=d.get("name", None), node_count=d.get("node_count", None), @@ -518,6 +556,9 @@ class DatabaseTable: When creating a table in a standard catalog, this field is required. In this scenario, specifying this field will allow targeting an arbitrary postgres database.""" + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + def as_dict(self) -> dict: """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body.""" body = {} @@ -527,6 +568,8 @@ def as_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url return body def as_shallow_dict(self) -> dict: @@ -538,6 +581,8 @@ def as_shallow_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url return body @classmethod @@ -547,6 +592,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable: database_instance_name=d.get("database_instance_name", None), logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), + table_serving_url=d.get("table_serving_url", None), ) @@ -586,6 +632,40 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo: ) +@dataclass +class ListDatabaseCatalogsResponse: + database_catalogs: Optional[List[DatabaseCatalog]] = None + + next_page_token: Optional[str] = None + """Pagination token to request the next page of database catalogs.""" + + def as_dict(self) -> dict: + """Serializes the ListDatabaseCatalogsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.database_catalogs: + body["database_catalogs"] = [v.as_dict() for v in self.database_catalogs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDatabaseCatalogsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.database_catalogs: + body["database_catalogs"] = self.database_catalogs + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseCatalogsResponse: + """Deserializes the ListDatabaseCatalogsResponse from a dictionary.""" + return cls( + database_catalogs=_repeated_dict(d, "database_catalogs", DatabaseCatalog), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class 
ListDatabaseInstanceRolesResponse: database_instance_roles: Optional[List[DatabaseInstanceRole]] = None @@ -656,11 +736,48 @@ def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse: ) +@dataclass +class ListSyncedDatabaseTablesResponse: + next_page_token: Optional[str] = None + """Pagination token to request the next page of synced tables.""" + + synced_tables: Optional[List[SyncedDatabaseTable]] = None + + def as_dict(self) -> dict: + """Serializes the ListSyncedDatabaseTablesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.synced_tables: + body["synced_tables"] = [v.as_dict() for v in self.synced_tables] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListSyncedDatabaseTablesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.synced_tables: + body["synced_tables"] = self.synced_tables + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListSyncedDatabaseTablesResponse: + """Deserializes the ListSyncedDatabaseTablesResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + synced_tables=_repeated_dict(d, "synced_tables", SyncedDatabaseTable), + ) + + @dataclass class NewPipelineSpec: """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other fields of pipeline are still inferred by table def internally""" + budget_policy_id: Optional[str] = None + """Budget policy of this pipeline.""" + storage_catalog: Optional[str] = None """This field needs to be specified if the destination catalog is a managed postgres catalog. @@ -676,6 +793,8 @@ class NewPipelineSpec: def as_dict(self) -> dict: """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.storage_catalog is not None: body["storage_catalog"] = self.storage_catalog if self.storage_schema is not None: @@ -685,6 +804,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.storage_catalog is not None: body["storage_catalog"] = self.storage_catalog if self.storage_schema is not None: @@ -694,7 +815,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec: """Deserializes the NewPipelineSpec from a dictionary.""" - return cls(storage_catalog=d.get("storage_catalog", None), storage_schema=d.get("storage_schema", None)) + return cls( + budget_policy_id=d.get("budget_policy_id", None), + storage_catalog=d.get("storage_catalog", None), + storage_schema=d.get("storage_schema", None), + ) class ProvisioningInfoState(Enum): @@ -809,6 +934,9 @@ class SyncedDatabaseTable: spec: Optional[SyncedTableSpec] = None + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline @@ -827,6 +955,8 @@ def as_dict(self) -> dict: body["name"] = self.name if self.spec: body["spec"] = self.spec.as_dict() + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value return body @@ -844,6 +974,8 @@ def as_shallow_dict(self) -> dict: body["name"] = self.name if self.spec: body["spec"] = self.spec + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state return body @@ -857,6 +989,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable: logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), spec=_from_dict(d, "spec", SyncedTableSpec), + table_serving_url=d.get("table_serving_url", None), unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), ) @@ -1532,6 +1665,28 @@ def delete_synced_database_table(self, name: str): self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", headers=headers) + def failover_database_instance( + self, name: str, *, failover_target_database_instance_name: Optional[str] = None + ) -> DatabaseInstance: + """Failover the primary node of a Database Instance to a secondary. + + :param name: str + Name of the instance to failover. + :param failover_target_database_instance_name: str (optional) + + :returns: :class:`DatabaseInstance` + """ + body = {} + if failover_target_database_instance_name is not None: + body["failover_target_database_instance_name"] = failover_target_database_instance_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/database/instances/{name}/failover", body=body, headers=headers) + return DatabaseInstance.from_dict(res) + def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance: """Find a Database Instance by uid. @@ -1661,6 +1816,41 @@ def get_synced_database_table(self, name: str) -> SyncedDatabaseTable: res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers) return SyncedDatabaseTable.from_dict(res) + def list_database_catalogs( + self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseCatalog]: + """List all Database Catalogs within a Database Instance. + + :param instance_name: str + Name of the instance to get database catalogs for. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of synced database tables. Requests first page if absent. 
+ + :returns: Iterator over :class:`DatabaseCatalog` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/database/instances/{instance_name}/catalogs", query=query, headers=headers + ) + if "database_catalogs" in json: + for v in json["database_catalogs"]: + yield DatabaseCatalog.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + def list_database_instance_roles( self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[DatabaseInstanceRole]: @@ -1726,6 +1916,67 @@ def list_database_instances( return query["page_token"] = json["next_page_token"] + def list_synced_database_tables( + self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[SyncedDatabaseTable]: + """List all Synced Database Tables within a Database Instance. + + :param instance_name: str + Name of the instance to get synced tables for. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of synced database tables. Requests first page if absent. + + :returns: Iterator over :class:`SyncedDatabaseTable` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/database/instances/{instance_name}/synced_tables", query=query, headers=headers + ) + if "synced_tables" in json: + for v in json["synced_tables"]: + yield SyncedDatabaseTable.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_database_catalog( + self, name: str, database_catalog: DatabaseCatalog, update_mask: str + ) -> DatabaseCatalog: + """Updated a Database Catalog. + + :param name: str + The name of the catalog in UC. + :param database_catalog: :class:`DatabaseCatalog` + Note that updating a database catalog is not yet supported. + :param update_mask: str + The list of fields to update. Setting this field is not yet supported. + + :returns: :class:`DatabaseCatalog` + """ + body = database_catalog.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/database/catalogs/{name}", query=query, body=body, headers=headers) + return DatabaseCatalog.from_dict(res) + def update_database_instance( self, name: str, database_instance: DatabaseInstance, update_mask: str ) -> DatabaseInstance: @@ -1750,3 +2001,29 @@ def update_database_instance( res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers) return DatabaseInstance.from_dict(res) + + def update_synced_database_table( + self, name: str, synced_table: SyncedDatabaseTable, update_mask: str + ) -> SyncedDatabaseTable: + """Update a Synced Database Table. + + :param name: str + Full three-part (catalog, schema, table) name of the table. 
+ :param synced_table: :class:`SyncedDatabaseTable` + Note that updating a synced database table is not yet supported. + :param update_mask: str + The list of fields to update. Setting this field is not yet supported. + + :returns: :class:`SyncedDatabaseTable` + """ + body = synced_table.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/database/synced_tables/{name}", query=query, body=body, headers=headers) + return SyncedDatabaseTable.from_dict(res) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 50c7ddb83..fae841354 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -857,11 +857,16 @@ class Continuous: pause_status: Optional[PauseStatus] = None """Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.""" + task_retry_mode: Optional[TaskRetryMode] = None + """Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.""" + def as_dict(self) -> dict: """Serializes the Continuous into a dictionary suitable for use as a JSON request body.""" body = {} if self.pause_status is not None: body["pause_status"] = self.pause_status.value + if self.task_retry_mode is not None: + body["task_retry_mode"] = self.task_retry_mode.value return body def as_shallow_dict(self) -> dict: @@ -869,12 +874,17 @@ def as_shallow_dict(self) -> dict: body = {} if self.pause_status is not None: body["pause_status"] = self.pause_status + if self.task_retry_mode is not None: + body["task_retry_mode"] = self.task_retry_mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Continuous: """Deserializes the Continuous from a dictionary.""" - return cls(pause_status=_enum(d, "pause_status", PauseStatus)) + return cls( + pause_status=_enum(d, "pause_status", PauseStatus), + task_retry_mode=_enum(d, "task_retry_mode", TaskRetryMode), + ) @dataclass @@ -3486,6 +3496,78 @@ def from_dict(cls, d: Dict[str, Any]) -> ListRunsResponse: ) +@dataclass +class ModelTriggerConfiguration: + condition: ModelTriggerConfigurationCondition + """The condition based on which to trigger a job run.""" + + aliases: Optional[List[str]] = None + """Aliases of the model versions to monitor. Can only be used in conjunction with condition + MODEL_ALIAS_SET.""" + + min_time_between_triggers_seconds: Optional[int] = None + """If set, the trigger starts a run only after the specified amount of time has passed since the + last time the trigger fired. The minimum allowed value is 60 seconds.""" + + securable_name: Optional[str] = None + """Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level + triggers, "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of + metastore-level triggers.""" + + wait_after_last_change_seconds: Optional[int] = None + """If set, the trigger starts a run only after no model updates have occurred for the specified + time and can be used to wait for a series of model updates before triggering a run. 
The minimum + allowed value is 60 seconds.""" + + def as_dict(self) -> dict: + """Serializes the ModelTriggerConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aliases: + body["aliases"] = [v for v in self.aliases] + if self.condition is not None: + body["condition"] = self.condition.value + if self.min_time_between_triggers_seconds is not None: + body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.wait_after_last_change_seconds is not None: + body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ModelTriggerConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aliases: + body["aliases"] = self.aliases + if self.condition is not None: + body["condition"] = self.condition + if self.min_time_between_triggers_seconds is not None: + body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.wait_after_last_change_seconds is not None: + body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ModelTriggerConfiguration: + """Deserializes the ModelTriggerConfiguration from a dictionary.""" + return cls( + aliases=d.get("aliases", None), + condition=_enum(d, "condition", ModelTriggerConfigurationCondition), + min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None), + securable_name=d.get("securable_name", None), + wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None), + ) + + +class ModelTriggerConfigurationCondition(Enum): + + MODEL_ALIAS_SET = "MODEL_ALIAS_SET" + MODEL_CREATED = "MODEL_CREATED" + MODEL_VERSION_READY = "MODEL_VERSION_READY" + + @dataclass class NotebookOutput: result: Optional[str] = None @@ -7782,6 +7864,16 @@ def from_dict(cls, d: Dict[str, Any]) -> TaskNotificationSettings: ) +class TaskRetryMode(Enum): + """task retry mode of the continuous job * NEVER: The failed task will not be retried. * + ON_FAILURE: Retry a failed task if at least one other task in the job is still running its first + attempt. When this condition is no longer met or the retry limit is reached, the job run is + cancelled and a new run is started.""" + + NEVER = "NEVER" + ON_FAILURE = "ON_FAILURE" + + class TerminationCodeCode(Enum): """The code indicates why the run was terminated. Additional codes might be introduced in future releases. * `SUCCESS`: The run was completed successfully. 
* `SUCCESS_WITH_FAILURES`: The run @@ -7936,6 +8028,8 @@ class TriggerSettings: file_arrival: Optional[FileArrivalTriggerConfiguration] = None """File arrival trigger settings.""" + model: Optional[ModelTriggerConfiguration] = None + pause_status: Optional[PauseStatus] = None """Whether this trigger is paused or not.""" @@ -7952,6 +8046,8 @@ def as_dict(self) -> dict: body = {} if self.file_arrival: body["file_arrival"] = self.file_arrival.as_dict() + if self.model: + body["model"] = self.model.as_dict() if self.pause_status is not None: body["pause_status"] = self.pause_status.value if self.periodic: @@ -7967,6 +8063,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.file_arrival: body["file_arrival"] = self.file_arrival + if self.model: + body["model"] = self.model if self.pause_status is not None: body["pause_status"] = self.pause_status if self.periodic: @@ -7982,6 +8080,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TriggerSettings: """Deserializes the TriggerSettings from a dictionary.""" return cls( file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerConfiguration), + model=_from_dict(d, "model", ModelTriggerConfiguration), pause_status=_enum(d, "pause_status", PauseStatus), periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration), table=_from_dict(d, "table", TableUpdateTriggerConfiguration), diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 9c8c90627..e83b3dd6f 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -1691,6 +1691,31 @@ def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelResponse: return cls(model=_from_dict(d, "model", LoggedModel)) +@dataclass +class GetLoggedModelsRequestResponse: + models: Optional[List[LoggedModel]] = None + """The retrieved logged models.""" + + def as_dict(self) -> dict: + """Serializes the GetLoggedModelsRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.models: + body["models"] = [v.as_dict() for v in self.models] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetLoggedModelsRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.models: + body["models"] = self.models + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelsRequestResponse: + """Deserializes the GetLoggedModelsRequestResponse from a dictionary.""" + return cls(models=_repeated_dict(d, "models", LoggedModel)) + + @dataclass class GetMetricHistoryResponse: metrics: Optional[List[Metric]] = None @@ -5389,6 +5414,25 @@ def get_logged_model(self, model_id: str) -> GetLoggedModelResponse: res = self._api.do("GET", f"/api/2.0/mlflow/logged-models/{model_id}", headers=headers) return GetLoggedModelResponse.from_dict(res) + def get_logged_models(self, *, model_ids: Optional[List[str]] = None) -> GetLoggedModelsRequestResponse: + """Batch endpoint for getting logged models from a list of model IDs + + :param model_ids: List[str] (optional) + The IDs of the logged models to retrieve. Max threshold is 100. 
+ + :returns: :class:`GetLoggedModelsRequestResponse` + """ + + query = {} + if model_ids is not None: + query["model_ids"] = [v for v in model_ids] + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/mlflow/logged-models:batchGet", query=query, headers=headers) + return GetLoggedModelsRequestResponse.from_dict(res) + def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLevelsResponse: """Gets the permission levels that a user can have on an object. diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 74c9cdd13..e06c85edc 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -606,6 +606,11 @@ class IngestionPipelineDefinition: """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.""" + ingest_from_uc_foreign_catalog: Optional[bool] = None + """Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs directly + without the need to specify a UC connection or ingestion gateway. The `source_catalog` fields in + objects of IngestionConfig are interpreted as the UC foreign catalogs to ingest from.""" + ingestion_gateway_id: Optional[str] = None """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.""" @@ -626,6 +631,8 @@ def as_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.ingest_from_uc_foreign_catalog is not None: + body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog if self.ingestion_gateway_id is not None: body["ingestion_gateway_id"] = self.ingestion_gateway_id if self.objects: @@ -641,6 +648,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.ingest_from_uc_foreign_catalog is not None: + body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog if self.ingestion_gateway_id is not None: body["ingestion_gateway_id"] = self.ingestion_gateway_id if self.objects: @@ -656,6 +665,7 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: """Deserializes the IngestionPipelineDefinition from a dictionary.""" return cls( connection_name=d.get("connection_name", None), + ingest_from_uc_foreign_catalog=d.get("ingest_from_uc_foreign_catalog", None), ingestion_gateway_id=d.get("ingestion_gateway_id", None), objects=_repeated_dict(d, "objects", IngestionConfig), source_type=_enum(d, "source_type", IngestionSourceType), @@ -2344,6 +2354,24 @@ def from_dict(cls, d: Dict[str, Any]) -> RestartWindow: ) +@dataclass +class RestorePipelineRequestResponse: + def as_dict(self) -> dict: + """Serializes the RestorePipelineRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RestorePipelineRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RestorePipelineRequestResponse: + """Deserializes the RestorePipelineRequestResponse from a dictionary.""" + return cls() + + @dataclass class RunAs: """Write-only setting, available only in Create/Update calls. 
Specifies the user or service
@@ -2728,6 +2756,10 @@ class TableSpecificConfig:
         None
     )
 
+    row_filter: Optional[str] = None
+    """(Optional, Immutable) The row filter condition to be applied to the table. It must not contain
+    the WHERE keyword, only the actual filter condition. It must be in DBSQL format."""
+
     salesforce_include_formula_fields: Optional[bool] = None
     """If true, formula fields defined in the table are included in the ingestion. This setting is
     only valid for the Salesforce connector"""
@@ -2750,6 +2782,8 @@ def as_dict(self) -> dict:
             body["primary_keys"] = [v for v in self.primary_keys]
         if self.query_based_connector_config:
             body["query_based_connector_config"] = self.query_based_connector_config.as_dict()
+        if self.row_filter is not None:
+            body["row_filter"] = self.row_filter
         if self.salesforce_include_formula_fields is not None:
             body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
         if self.scd_type is not None:
@@ -2769,6 +2803,8 @@ def as_shallow_dict(self) -> dict:
             body["primary_keys"] = self.primary_keys
         if self.query_based_connector_config:
             body["query_based_connector_config"] = self.query_based_connector_config
+        if self.row_filter is not None:
+            body["row_filter"] = self.row_filter
         if self.salesforce_include_formula_fields is not None:
             body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
         if self.scd_type is not None:
@@ -2789,6 +2825,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig:
                 "query_based_connector_config",
                 IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
             ),
+            row_filter=d.get("row_filter", None),
             salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
             scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
             sequence_by=d.get("sequence_by", None),
@@ -3449,6 +3486,23 @@ def list_updates(
         res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/updates", query=query, headers=headers)
         return ListUpdatesResponse.from_dict(res)
 
+    def restore_pipeline(self, pipeline_id: str) -> RestorePipelineRequestResponse:
+        """Restores a pipeline that was previously deleted, if within the restoration window. All tables
+        deleted at pipeline deletion will be undropped as well.
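+
+        A minimal sketch (assuming a configured :class:`WorkspaceClient` ``w`` and an illustrative
+        pipeline ID):
+
+        .. code-block:: python
+
+            w.pipelines.restore_pipeline(pipeline_id="<pipeline-id>")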
+ + :param pipeline_id: str + The ID of the pipeline to restore + + :returns: :class:`RestorePipelineRequestResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/restore", headers=headers) + return RestorePipelineRequestResponse.from_dict(res) + def set_permissions( self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None ) -> PipelinePermissions: diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py index a6fab7023..b507697a9 100755 --- a/databricks/sdk/service/qualitymonitorv2.py +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -17,6 +17,9 @@ @dataclass class AnomalyDetectionConfig: + job_type: Optional[AnomalyDetectionJobType] = None + """The type of the last run of the workflow.""" + last_run_id: Optional[str] = None """Run id of the last run of the workflow""" @@ -26,6 +29,8 @@ class AnomalyDetectionConfig: def as_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.job_type is not None: + body["job_type"] = self.job_type.value if self.last_run_id is not None: body["last_run_id"] = self.last_run_id if self.latest_run_status is not None: @@ -35,6 +40,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.job_type is not None: + body["job_type"] = self.job_type if self.last_run_id is not None: body["last_run_id"] = self.last_run_id if self.latest_run_status is not None: @@ -45,11 +52,18 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig: """Deserializes the AnomalyDetectionConfig from a dictionary.""" return cls( + job_type=_enum(d, "job_type", AnomalyDetectionJobType), last_run_id=d.get("last_run_id", None), latest_run_status=_enum(d, "latest_run_status", AnomalyDetectionRunStatus), ) +class AnomalyDetectionJobType(Enum): + + ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN = "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN" + ANOMALY_DETECTION_JOB_TYPE_NORMAL = "ANOMALY_DETECTION_JOB_TYPE_NORMAL" + + class AnomalyDetectionRunStatus(Enum): """Status of Anomaly Detection Job Run""" diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 3df700055..56c48d375 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -313,6 +313,9 @@ class AiGatewayRateLimit: """Principal field for a user, user group, or service principal to apply rate limiting to. 
Accepts a user email, group name, or service principal application ID.""" + tokens: Optional[int] = None + """Used to specify how many tokens are allowed for a key within the renewal_period.""" + def as_dict(self) -> dict: """Serializes the AiGatewayRateLimit into a dictionary suitable for use as a JSON request body.""" body = {} @@ -324,6 +327,8 @@ def as_dict(self) -> dict: body["principal"] = self.principal if self.renewal_period is not None: body["renewal_period"] = self.renewal_period.value + if self.tokens is not None: + body["tokens"] = self.tokens return body def as_shallow_dict(self) -> dict: @@ -337,6 +342,8 @@ def as_shallow_dict(self) -> dict: body["principal"] = self.principal if self.renewal_period is not None: body["renewal_period"] = self.renewal_period + if self.tokens is not None: + body["tokens"] = self.tokens return body @classmethod @@ -347,6 +354,7 @@ def from_dict(cls, d: Dict[str, Any]) -> AiGatewayRateLimit: key=_enum(d, "key", AiGatewayRateLimitKey), principal=d.get("principal", None), renewal_period=_enum(d, "renewal_period", AiGatewayRateLimitRenewalPeriod), + tokens=d.get("tokens", None), ) diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 2d379c696..ed89e35d1 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -67,12 +67,12 @@ def from_dict(cls, d: Dict[str, Any]) -> AccountIpAccessEnable: @dataclass class AccountNetworkPolicy: + egress: NetworkPolicyEgress + """The network policies that apply to egress traffic.""" + account_id: Optional[str] = None """The associated account ID for this Network Policy object.""" - egress: Optional[NetworkPolicyEgress] = None - """The network policies applying for egress traffic.""" - network_policy_id: Optional[str] = None """The unique identifier for the network policy.""" @@ -4208,7 +4208,7 @@ class NetworkPolicyEgress: consistent with [[com.databricks.api.proto.settingspolicy.EgressNetworkPolicy]]. For details, see the API design: https://docs.google.com/document/d/1DKWO_FpZMCY4cF2O62LpwII1lx8gsnDGG-qgE3t3TOA/""" - network_access: Optional[EgressNetworkPolicyNetworkAccessPolicy] = None + network_access: EgressNetworkPolicyNetworkAccessPolicy """The access policy enforced for egress traffic to the internet.""" def as_dict(self) -> dict: diff --git a/databricks/sdk/service/settingsv2.py b/databricks/sdk/service/settingsv2.py new file mode 100755 index 000000000..7f69efeac --- /dev/null +++ b/databricks/sdk/service/settingsv2.py @@ -0,0 +1,236 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
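+
+# Illustrative usage sketch (editor's note, not part of the generated code):
+# assumes an authenticated WorkspaceClient; the setting name
+# "my_boolean_setting" is hypothetical.
+#
+#   from databricks.sdk import WorkspaceClient
+#
+#   w = WorkspaceClient()
+#   current = w.workspace_settings_v2.get_public_workspace_setting("my_boolean_setting")
+#   updated = w.workspace_settings_v2.patch_public_workspace_setting(
+#       "my_boolean_setting",
+#       Setting(name="my_boolean_setting", boolean_val=BooleanMessage(value=True)),
+#   )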
+ +from __future__ import annotations + +import logging +from dataclasses import dataclass +from typing import Any, Dict, Optional + +from ._internal import _from_dict + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +@dataclass +class BooleanMessage: + value: Optional[bool] = None + + def as_dict(self) -> dict: + """Serializes the BooleanMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the BooleanMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> BooleanMessage: + """Deserializes the BooleanMessage from a dictionary.""" + return cls(value=d.get("value", None)) + + +@dataclass +class IntegerMessage: + value: Optional[int] = None + + def as_dict(self) -> dict: + """Serializes the IntegerMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the IntegerMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> IntegerMessage: + """Deserializes the IntegerMessage from a dictionary.""" + return cls(value=d.get("value", None)) + + +@dataclass +class Setting: + boolean_val: Optional[BooleanMessage] = None + + effective_boolean_val: Optional[BooleanMessage] = None + + effective_integer_val: Optional[IntegerMessage] = None + + effective_string_val: Optional[StringMessage] = None + + integer_val: Optional[IntegerMessage] = None + + name: Optional[str] = None + """Name of the setting.""" + + string_val: Optional[StringMessage] = None + + def as_dict(self) -> dict: + """Serializes the Setting into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.boolean_val: + body["boolean_val"] = self.boolean_val.as_dict() + if self.effective_boolean_val: + body["effective_boolean_val"] = self.effective_boolean_val.as_dict() + if self.effective_integer_val: + body["effective_integer_val"] = self.effective_integer_val.as_dict() + if self.effective_string_val: + body["effective_string_val"] = self.effective_string_val.as_dict() + if self.integer_val: + body["integer_val"] = self.integer_val.as_dict() + if self.name is not None: + body["name"] = self.name + if self.string_val: + body["string_val"] = self.string_val.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Setting into a shallow dictionary of its immediate attributes.""" + body = {} + if self.boolean_val: + body["boolean_val"] = self.boolean_val + if self.effective_boolean_val: + body["effective_boolean_val"] = self.effective_boolean_val + if self.effective_integer_val: + body["effective_integer_val"] = self.effective_integer_val + if self.effective_string_val: + body["effective_string_val"] = self.effective_string_val + if self.integer_val: + body["integer_val"] = self.integer_val + if self.name is not None: + body["name"] = self.name + if self.string_val: + body["string_val"] = self.string_val + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Setting: + """Deserializes the Setting from a 
dictionary.""" + return cls( + boolean_val=_from_dict(d, "boolean_val", BooleanMessage), + effective_boolean_val=_from_dict(d, "effective_boolean_val", BooleanMessage), + effective_integer_val=_from_dict(d, "effective_integer_val", IntegerMessage), + effective_string_val=_from_dict(d, "effective_string_val", StringMessage), + integer_val=_from_dict(d, "integer_val", IntegerMessage), + name=d.get("name", None), + string_val=_from_dict(d, "string_val", StringMessage), + ) + + +@dataclass +class StringMessage: + value: Optional[str] = None + """Represents a generic string value.""" + + def as_dict(self) -> dict: + """Serializes the StringMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the StringMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> StringMessage: + """Deserializes the StringMessage from a dictionary.""" + return cls(value=d.get("value", None)) + + +class AccountSettingsV2API: + """APIs to manage account level settings""" + + def __init__(self, api_client): + self._api = api_client + + def get_public_account_setting(self, name: str) -> Setting: + """Get a setting value at account level + + :param name: str + + :returns: :class:`Setting` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/accounts/{self._api.account_id}/settings/{name}", headers=headers) + return Setting.from_dict(res) + + def patch_public_account_setting(self, name: str, setting: Setting) -> Setting: + """Patch a setting value at account level + + :param name: str + :param setting: :class:`Setting` + + :returns: :class:`Setting` + """ + body = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.1/accounts/{self._api.account_id}/settings/{name}", body=body, headers=headers + ) + return Setting.from_dict(res) + + +class WorkspaceSettingsV2API: + """APIs to manage workspace level settings""" + + def __init__(self, api_client): + self._api = api_client + + def get_public_workspace_setting(self, name: str) -> Setting: + """Get a setting value at workspace level + + :param name: str + + :returns: :class:`Setting` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/settings/{name}", headers=headers) + return Setting.from_dict(res) + + def patch_public_workspace_setting(self, name: str, setting: Setting) -> Setting: + """Patch a setting value at workspace level + + :param name: str + :param setting: :class:`Setting` + + :returns: :class:`Setting` + """ + body = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/settings/{name}", body=body, headers=headers) + return Setting.from_dict(res) diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 3cbb98dc9..1ff248c51 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -1134,6 +1134,15 @@ class PermissionsChange: """The principal whose privileges we are changing. 
Only one of principal or principal_id should be specified, never both at the same time.""" + principal_id: Optional[int] = None + """An opaque internal ID that identifies the principal whose privileges should be removed. + + This field is intended for removing privileges associated with a deleted user. When set, only + the entries specified in the remove field are processed; any entries in the add field will be + rejected. + + Only one of principal or principal_id should be specified, never both at the same time.""" + remove: Optional[List[str]] = None """The set of privileges to remove.""" @@ -1144,6 +1153,8 @@ def as_dict(self) -> dict: body["add"] = [v for v in self.add] if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = [v for v in self.remove] return body @@ -1155,6 +1166,8 @@ def as_shallow_dict(self) -> dict: body["add"] = self.add if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = self.remove return body @@ -1162,7 +1175,12 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: """Deserializes the PermissionsChange from a dictionary.""" - return cls(add=d.get("add", None), principal=d.get("principal", None), remove=d.get("remove", None)) + return cls( + add=d.get("add", None), + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + remove=d.get("remove", None), + ) class Privilege(Enum): @@ -1220,6 +1238,10 @@ class PrivilegeAssignment: """The principal (user email address or group name). For deleted principals, `principal` is empty while `principal_id` is populated.""" + principal_id: Optional[int] = None + """Unique identifier of the principal. For active principals, both `principal` and `principal_id` + are present.""" + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -1228,6 +1250,8 @@ def as_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = [v.value for v in self.privileges] return body @@ -1237,6 +1261,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = self.privileges return body @@ -1244,7 +1270,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls( + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + privileges=_repeated_enum(d, "privileges", Privilege), + ) @dataclass diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index fb8ea8f94..237b2e088 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -192,6 +192,9 @@ class DeltaSyncVectorIndexSpecRequest: columns from the source table are synced with the index. 
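For example, ["id", "text", "title"] (illustrative column names).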
The primary key column and embedding source column or embedding vector column are always synced.""" + effective_budget_policy_id: Optional[str] = None + """The budget policy id applied to the vector search index""" + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -216,6 +219,8 @@ def as_dict(self) -> dict: body = {} if self.columns_to_sync: body["columns_to_sync"] = [v for v in self.columns_to_sync] + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -233,6 +238,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.columns_to_sync: body["columns_to_sync"] = self.columns_to_sync + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -250,6 +257,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary.""" return cls( columns_to_sync=d.get("columns_to_sync", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -260,6 +268,9 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: @dataclass class DeltaSyncVectorIndexSpecResponse: + effective_budget_policy_id: Optional[str] = None + """The budget policy id applied to the vector search index""" + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -285,6 +296,8 @@ class DeltaSyncVectorIndexSpecResponse: def as_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -302,6 +315,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -320,6 +335,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecResponse: """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary.""" return cls( + effective_budget_policy_id=d.get("effective_budget_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), 
embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -861,6 +877,60 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexResponse: ) +@dataclass +class RerankerConfig: + model: Optional[str] = None + + parameters: Optional[RerankerConfigRerankerParameters] = None + + def as_dict(self) -> dict: + """Serializes the RerankerConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.model is not None: + body["model"] = self.model + if self.parameters: + body["parameters"] = self.parameters.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RerankerConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model is not None: + body["model"] = self.model + if self.parameters: + body["parameters"] = self.parameters + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RerankerConfig: + """Deserializes the RerankerConfig from a dictionary.""" + return cls(model=d.get("model", None), parameters=_from_dict(d, "parameters", RerankerConfigRerankerParameters)) + + +@dataclass +class RerankerConfigRerankerParameters: + columns_to_rerank: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the RerankerConfigRerankerParameters into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns_to_rerank: + body["columns_to_rerank"] = [v for v in self.columns_to_rerank] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RerankerConfigRerankerParameters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns_to_rerank: + body["columns_to_rerank"] = self.columns_to_rerank + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RerankerConfigRerankerParameters: + """Deserializes the RerankerConfigRerankerParameters from a dictionary.""" + return cls(columns_to_rerank=d.get("columns_to_rerank", None)) + + @dataclass class ResultData: """Data returned in the query result.""" @@ -1597,20 +1667,27 @@ def delete_index(self, index_name: str): self._api.do("DELETE", f"/api/2.0/vector-search/indexes/{index_name}", headers=headers) - def get_index(self, index_name: str) -> VectorIndex: + def get_index(self, index_name: str, *, ensure_reranker_compatible: Optional[bool] = None) -> VectorIndex: """Get an index. :param index_name: str Name of the index + :param ensure_reranker_compatible: bool (optional) + If true, the URL returned for the index is guaranteed to be compatible with the reranker. Currently + this means we return the CP URL regardless of how the index is being accessed. If not set or set to + false, the URL may still be compatible with the reranker depending on what URL we return. 
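+
+        A minimal usage sketch (assumes an authenticated :class:`WorkspaceClient` ``w``;
+        the index name, columns, and reranker model are illustrative):
+
+        .. code-block:: python
+
+            idx = w.vector_search_indexes.get_index(
+                "main.default.my_index", ensure_reranker_compatible=True
+            )
+            results = w.vector_search_indexes.query_index(
+                index_name="main.default.my_index",
+                columns=["id", "text"],
+                query_text="what is a vector index?",
+                reranker=RerankerConfig(
+                    model="my-reranker",
+                    parameters=RerankerConfigRerankerParameters(columns_to_rerank=["text"]),
+                ),
+            )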
:returns: :class:`VectorIndex` """ + query = {} + if ensure_reranker_compatible is not None: + query["ensure_reranker_compatible"] = ensure_reranker_compatible headers = { "Accept": "application/json", } - res = self._api.do("GET", f"/api/2.0/vector-search/indexes/{index_name}", headers=headers) + res = self._api.do("GET", f"/api/2.0/vector-search/indexes/{index_name}", query=query, headers=headers) return VectorIndex.from_dict(res) def list_indexes(self, endpoint_name: str, *, page_token: Optional[str] = None) -> Iterator[MiniVectorIndex]: @@ -1653,6 +1730,7 @@ def query_index( query_text: Optional[str] = None, query_type: Optional[str] = None, query_vector: Optional[List[float]] = None, + reranker: Optional[RerankerConfig] = None, score_threshold: Optional[float] = None, ) -> QueryVectorIndexResponse: """Query the specified vector index. @@ -1680,6 +1758,7 @@ def query_index( :param query_vector: List[float] (optional) Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed vectors. + :param reranker: :class:`RerankerConfig` (optional) :param score_threshold: float (optional) Threshold for the approximate nearest neighbor search. Defaults to 0.0. @@ -1700,6 +1779,8 @@ def query_index( body["query_type"] = query_type if query_vector is not None: body["query_vector"] = [v for v in query_vector] + if reranker is not None: + body["reranker"] = reranker.as_dict() if score_threshold is not None: body["score_threshold"] = score_threshold headers = {